/* Callgraph handling code.
- Copyright (C) 2003-2020 Free Software Foundation, Inc.
+ Copyright (C) 2003-2021 Free Software Foundation, Inc.
Contributed by Jan Hubicka
This file is part of GCC.
#include "attribs.h"
#include "selftest.h"
#include "tree-into-ssa.h"
+#include "ipa-inline.h"
+#include "tree-nested.h"
+#include "symtab-thunks.h"
+#include "symtab-clones.h"
/* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
#include "tree-pass.h"
node->ifunc_resolver = true;
node->register_symbol ();
+ maybe_record_nested_function (node);
- if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
- {
- node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
- node->next_nested = node->origin->nested;
- node->origin->nested = node;
- }
return node;
}
? virtual_value == wi::to_wide (virtual_offset)
: virtual_value == 0);
- node->thunk.fixed_offset = fixed_offset;
- node->thunk.virtual_value = virtual_value;
- node->thunk.indirect_offset = indirect_offset;
- node->thunk.alias = real_alias;
- node->thunk.this_adjusting = this_adjusting;
- node->thunk.virtual_offset_p = virtual_offset != NULL;
- node->thunk.thunk_p = true;
+ node->thunk = true;
node->definition = true;
+ thunk_info *i;
+ thunk_info local_info;
+ if (symtab->state < CONSTRUCTION)
+ i = &local_info;
+ else
+ i = thunk_info::get_create (node);
+ i->fixed_offset = fixed_offset;
+ i->virtual_value = virtual_value;
+ i->indirect_offset = indirect_offset;
+ i->alias = real_alias;
+ i->this_adjusting = this_adjusting;
+ i->virtual_offset_p = virtual_offset != NULL;
+ if (symtab->state < CONSTRUCTION)
+ i->register_early (node);
+
return node;
}
{
tree decl;
+ cgraph_node *new_direct_callee = NULL;
+ if ((e->indirect_unknown_callee || e->speculative)
+ && (decl = gimple_call_fndecl (new_stmt)))
+ {
+ /* Constant propagation and especially inlining can turn an indirect call
+ into a direct one. */
+ new_direct_callee = cgraph_node::get (decl);
+ gcc_checking_assert (new_direct_callee);
+ }
+
/* Speculative edges has three component, update all of them
when asked to. */
- if (update_speculative && e->speculative)
+ if (update_speculative && e->speculative
+ /* If we are about to resolve the speculation by calling make_direct
+ below, do not bother going over all the speculative edges now. */
+ && !new_direct_callee)
{
cgraph_edge *direct, *indirect, *next;
ipa_ref *ref;
return e_indirect ? indirect : direct;
}
+ if (new_direct_callee)
+ e = make_direct (e, new_direct_callee);
+
/* Only direct speculative edges go to call_site_hash. */
if (e->caller->call_site_hash
&& (!e->speculative || !e->indirect_unknown_callee)
(e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt));
e->call_stmt = new_stmt;
- if (e->indirect_unknown_callee
- && (decl = gimple_call_fndecl (new_stmt)))
- {
- /* Constant propagation (and possibly also inlining?) can turn an
- indirect call into a direct one. */
- cgraph_node *new_callee = cgraph_node::get (decl);
-
- gcc_checking_assert (new_callee);
- e = make_direct (e, new_callee);
- }
function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
= decl_maybe_in_construction_p (NULL, NULL, call_stmt,
caller->decl);
else
- edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
+ edge->in_polymorphic_cdtor = caller->thunk;
+ if (callee)
+ caller->calls_declare_variant_alt |= callee->declare_variant_alt;
if (callee && symtab->state != LTO_STREAMING
&& edge->callee->comdat_local_p ())
{
cgraph_edge *tmp = edge;
if (dump_file)
- fprintf (dump_file, "Speculative call turned into direct call.\n");
+ fprintf (dump_file, "Speculative call turned into direct call.\n");
edge = e2;
e2 = tmp;
/* FIXME: If EDGE is inlined, we should scale up the frequencies
return NULL;
}
-/* Make an indirect edge with an unknown callee an ordinary edge leading to
- CALLEE. Speculations can be resolved in the process and EDGE can be removed
- and deallocated. Return the edge that now represents the call. */
+/* Make an indirect or speculative EDGE with an unknown callee an ordinary edge
+ leading to CALLEE. Speculations can be resolved in the process and EDGE can
+ be removed and deallocated. Return the edge that now represents the
+ call. */
cgraph_edge *
cgraph_edge::make_direct (cgraph_edge *edge, cgraph_node *callee)
{
- gcc_assert (edge->indirect_unknown_callee);
+ gcc_assert (edge->indirect_unknown_callee || edge->speculative);
/* If we are redirecting speculative call, make it non-speculative. */
if (edge->speculative)
|| decl == e->callee->decl)
return e->call_stmt;
+ if (decl && ipa_saved_clone_sources)
+ {
+ tree *p = ipa_saved_clone_sources->get (e->callee);
+ if (p && decl == *p)
+ {
+ gimple_call_set_fndecl (e->call_stmt, e->callee->decl);
+ return e->call_stmt;
+ }
+ }
if (flag_checking && decl)
{
- cgraph_node *node = cgraph_node::get (decl);
- gcc_assert (!node || !node->clone.param_adjustments);
+ if (cgraph_node *node = cgraph_node::get (decl))
+ {
+ clone_info *info = clone_info::get (node);
+ gcc_assert (!info || !info->param_adjustments);
+ }
}
+ clone_info *callee_info = clone_info::get (e->callee);
if (symtab->dump_file)
{
fprintf (symtab->dump_file, "updating call of %s -> %s: ",
e->caller->dump_name (), e->callee->dump_name ());
print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
- if (e->callee->clone.param_adjustments)
- e->callee->clone.param_adjustments->dump (symtab->dump_file);
- unsigned performed_len
- = vec_safe_length (e->caller->clone.performed_splits);
- if (performed_len > 0)
- fprintf (symtab->dump_file, "Performed splits records:\n");
- for (unsigned i = 0; i < performed_len; i++)
- {
- ipa_param_performed_split *sm
- = &(*e->caller->clone.performed_splits)[i];
- print_node_brief (symtab->dump_file, " dummy_decl: ", sm->dummy_decl,
- TDF_UID);
- fprintf (symtab->dump_file, ", unit_offset: %u\n", sm->unit_offset);
- }
+ if (callee_info && callee_info->param_adjustments)
+ callee_info->param_adjustments->dump (symtab->dump_file);
}
- if (ipa_param_adjustments *padjs = e->callee->clone.param_adjustments)
+ if (ipa_param_adjustments *padjs
+ = callee_info ? callee_info->param_adjustments : NULL)
{
/* We need to defer cleaning EH info on the new statement to
- fixup-cfg. We may not have dominator information at this point
+ fixup-cfg. We may not have dominator information at this point
and thus would end up with unreachable blocks and have no way
to communicate that we need to run CFG cleanup then. */
int lp_nr = lookup_stmt_eh_lp (e->call_stmt);
remove_stmt_from_eh_lp (e->call_stmt);
tree old_fntype = gimple_call_fntype (e->call_stmt);
- new_stmt = padjs->modify_call (e->call_stmt,
- e->caller->clone.performed_splits,
- e->callee->decl, false);
+ new_stmt = padjs->modify_call (e, false);
cgraph_node *origin = e->callee;
while (origin->clone_of)
origin = origin->clone_of;
}
else
{
+ if (flag_checking
+ && !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
+ ipa_verify_edge_has_no_modifications (e);
new_stmt = e->call_stmt;
gimple_call_set_fndecl (new_stmt, e->callee->decl);
update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
var = get_or_create_ssa_default_def
(DECL_STRUCT_FUNCTION (e->caller->decl), var);
gimple *set_stmt = gimple_build_assign (lhs, var);
- gsi = gsi_for_stmt (new_stmt);
+ gsi = gsi_for_stmt (new_stmt);
gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
}
if (orig->clones)
for (node = orig->clones; node != orig;)
{
- cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
+ cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl,
+ new_stmt);
if (node->clones)
node = node->clones;
else if (node->next_sibling_clone)
gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
delete_tree_cfg_annotations (fn);
- clear_edges (fn);
+ free_cfg (fn);
fn->cfg = NULL;
}
if (fn->value_histograms)
free_histograms (fn);
gimple_set_body (decl, NULL);
/* Struct function hangs a lot of data that would leak if we didn't
- removed all pointers to it. */
+ removed all pointers to it. */
ggc_free (fn);
DECL_STRUCT_FUNCTION (decl) = NULL;
}
lto_free_function_in_decl_state_for_node (this);
lto_file_data = NULL;
}
+ if (flag_checking && clones)
+ {
+ /* It is invalid to release body before materializing clones except
+ for thunks that don't really need a body. Verify also that we do
+ not leak pointers to the call statements. */
+ for (cgraph_node *node = clones; node;
+ node = node->next_sibling_clone)
+ gcc_assert (node->thunk && !node->callees->call_stmt);
+ }
+ remove_callees ();
+ remove_all_references ();
}
/* Remove function from symbol table. */
void
cgraph_node::remove (void)
{
+ bool clone_info_set = false;
+ clone_info *info, saved_info;
if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
fprintf (symtab->ipa_clones_dump_file,
"Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
DECL_SOURCE_COLUMN (decl));
+ if ((info = clone_info::get (this)) != NULL)
+ {
+ saved_info = *info;
+ clone_info_set = true;
+ }
symtab->call_cgraph_removal_hooks (this);
remove_callers ();
remove_callees ();
*/
force_output = false;
forced_by_abi = false;
- cgraph_node *next;
- for (cgraph_node *n = nested; n; n = next)
- {
- next = n->next_nested;
- n->origin = NULL;
- n->next_nested = NULL;
- }
- nested = NULL;
- if (origin)
- {
- cgraph_node **node2 = &origin->nested;
- while (*node2 != this)
- node2 = &(*node2)->next_nested;
- *node2 = next_nested;
- }
- unregister ();
+ unregister (clone_info_set ? &saved_info : NULL);
if (prev_sibling_clone)
prev_sibling_clone->next_sibling_clone = next_sibling_clone;
else if (clone_of)
cgraph_node *n, *next;
if (clone_of)
- {
+ {
for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
n->clone_of = clone_of;
n->clone_of = clone_of;
clone_of->clones = clones;
}
else
- {
+ {
/* We are removing node with clones. This makes clones inconsistent,
but assume they will be removed subsequently and just keep clone
tree intact. This can happen in unreachable function removal since
fprintf (f, "(can throw external) ");
}
+/* Dump edge to stderr.  Debugger convenience wrapper: prints the
+   caller -> callee pair (callee may be NULL for indirect edges), the
+   edge flags, and then the full dump of both endpoint nodes.  */
+
+void
+cgraph_edge::debug (void)
+{
+ fprintf (stderr, "%s -> %s ", caller->dump_asm_name (),
+ callee == NULL ? "(null)" : callee->dump_asm_name ());
+ dump_edge_flags (stderr);
+ fprintf (stderr, "\n\n");
+ caller->debug ();
+ /* Indirect edges have no callee node to dump.  */
+ if (callee != NULL)
+ callee->debug ();
+}
+
/* Dump call graph node to file F. */
void
}
if (tp_first_run > 0)
fprintf (f, " first_run:%" PRId64, (int64_t) tp_first_run);
- if (origin)
+ if (cgraph_node *origin = nested_function_origin (this))
fprintf (f, " nested in:%s", origin->dump_asm_name ());
if (gimple_has_body_p (decl))
fprintf (f, " body");
fprintf (f, " optimize_size");
if (parallelized_function)
fprintf (f, " parallelized_function");
+ if (DECL_IS_MALLOC (decl))
+ fprintf (f, " decl_is_malloc");
if (DECL_IS_OPERATOR_NEW_P (decl))
fprintf (f, " %soperator_new",
DECL_IS_REPLACEABLE_OPERATOR (decl) ? "replaceable_" : "");
fprintf (f, "\n");
- if (thunk.thunk_p)
+ if (thunk)
{
fprintf (f, " Thunk");
- if (thunk.alias)
- fprintf (f, " of %s (asm:%s)",
- lang_hooks.decl_printable_name (thunk.alias, 2),
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
- fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
- "has virtual offset %i\n",
- (int)thunk.fixed_offset,
- (int)thunk.virtual_value,
- (int)thunk.indirect_offset,
- (int)thunk.virtual_offset_p);
+ thunk_info::get (this)->dump (f);
}
else if (former_thunk_p ())
- fprintf (f, " Former thunk fixed offset %i virtual value %i "
- "indirect_offset %i has virtual offset %i\n",
- (int)thunk.fixed_offset,
- (int)thunk.virtual_value,
- (int)thunk.indirect_offset,
- (int)thunk.virtual_offset_p);
- if (alias && thunk.alias
- && DECL_P (thunk.alias))
{
- fprintf (f, " Alias of %s",
- lang_hooks.decl_printable_name (thunk.alias, 2));
- if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
- fprintf (f, " (asm:%s)",
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
- fprintf (f, "\n");
+ fprintf (f, " Former thunk ");
+ thunk_info::get (this)->dump (f);
}
+ else gcc_checking_assert (!thunk_info::get (this));
fprintf (f, " Called by: ");
}
fprintf (f, "\n");
- if (count.ipa ().initialized_p ())
+ if (!body_removed && count.ipa ().initialized_p ())
{
bool ok = true;
bool min = false;
FOR_EACH_ALIAS (this, ref)
if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
-
+
if (inlined_to
|| (symtab->state < EXPANSION
&& ultimate_alias_target () == this && only_called_directly_p ()))
{
if (edge->indirect_info->polymorphic)
{
- fprintf (f, " Polymorphic indirect call of type ");
+ fprintf (f, " Polymorphic indirect call of type ");
print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
}
else
- fprintf (f, " Indirect call");
+ fprintf (f, " Indirect call");
edge->dump_edge_flags (f);
if (edge->indirect_info->param_index != -1)
{
- fprintf (f, " of param:%i", edge->indirect_info->param_index);
+ fprintf (f, "of param:%i ", edge->indirect_info->param_index);
if (edge->indirect_info->agg_contents)
- fprintf (f, " loaded from %s %s at offset %i",
+ fprintf (f, "loaded from %s %s at offset %i ",
edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
edge->indirect_info->by_ref ? "passed by reference":"",
(int)edge->indirect_info->offset);
if (edge->indirect_info->vptr_changed)
- fprintf (f, " (vptr maybe changed)");
+ fprintf (f, "(vptr maybe changed) ");
}
- fprintf (f, " Num speculative call targets: %i",
+ fprintf (f, "num speculative call targets: %i\n",
edge->indirect_info->num_speculative_call_targets);
- fprintf (f, "\n");
if (edge->indirect_info->polymorphic)
edge->indirect_info->context.dump (f);
}
return DECL_POSSIBLY_INLINED (decl);
}
-/* cgraph_node is no longer nested function; update cgraph accordingly. */
-void
-cgraph_node::unnest (void)
-{
- cgraph_node **node2 = &origin->nested;
- gcc_assert (origin);
-
- while (*node2 != this)
- node2 = &(*node2)->next_nested;
- *node2 = next_nested;
- origin = NULL;
-}
-
/* Return function availability. See cgraph.h for description of individual
return values. */
enum availability
ref = cref->inlined_to;
}
enum availability avail;
- if (!analyzed)
+ if (!analyzed && !in_other_partition)
avail = AVAIL_NOT_AVAILABLE;
else if (local)
avail = AVAIL_LOCAL;
Also comdat groups are always resolved in groups. */
else if ((this == ref && !has_aliases_p ())
- || (ref && get_comdat_group ()
- && get_comdat_group () == ref->get_comdat_group ()))
+ || (ref && get_comdat_group ()
+ && get_comdat_group () == ref->get_comdat_group ()))
avail = AVAIL_AVAILABLE;
/* Inline functions are safe to be analyzed even if their symbol can
be overwritten at runtime. It is not meaningful to enforce any sane
|| (avail = get_availability ()) > AVAIL_INTERPOSABLE)
{
if (callback (this, data))
- return true;
+ return true;
}
FOR_EACH_ALIAS (this, ref)
{
if (avail <= AVAIL_INTERPOSABLE)
return false;
for (e = callers; e; e = e->next_caller)
- if (e->caller->thunk.thunk_p
+ if (e->caller->thunk
&& (include_overwritable
|| e->caller->get_availability () > AVAIL_INTERPOSABLE)
&& !(exclude_virtual_thunks
- && e->caller->thunk.virtual_offset_p))
+ && thunk_info::get (e->caller)->virtual_offset_p))
if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
include_overwritable,
exclude_virtual_thunks))
set_nothrow_flag_1 (alias, nothrow, non_call, changed);
}
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
- if (e->caller->thunk.thunk_p
+ if (e->caller->thunk
&& (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
}
}
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
- if (e->caller->thunk.thunk_p
+ if (e->caller->thunk
&& (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
set_malloc_flag_1 (e->caller, malloc_p, changed);
}
{
if (TREE_READONLY (node->decl))
{
- TREE_READONLY (node->decl) = 0;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
+ TREE_READONLY (node->decl) = 0;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
*changed = true;
}
}
{
if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
{
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
*changed = true;
}
}
else if (node->binds_to_current_def_p ())
{
TREE_READONLY (node->decl) = true;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
DECL_PURE_P (node->decl) = false;
*changed = true;
}
if (!DECL_PURE_P (node->decl))
{
DECL_PURE_P (node->decl) = true;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
*changed = true;
}
else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
{
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
*changed = true;
}
}
set_const_flag_1 (alias, set_const, looping, changed);
}
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
- if (e->caller->thunk.thunk_p
+ if (e->caller->thunk
&& (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
{
/* Virtual thunks access virtual offset in the vtable, so they can
only be pure, never const. */
- if (set_const
- && (e->caller->thunk.virtual_offset_p
- || !node->binds_to_current_def_p (e->caller)))
+ if (set_const
+ && (thunk_info::get (e->caller)->virtual_offset_p
+ || !node->binds_to_current_def_p (e->caller)))
*changed |= e->caller->set_pure_flag (true, looping);
else
set_const_flag_1 (e->caller, set_const, looping, changed);
{
if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
{
- DECL_PURE_P (node->decl) = true;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
+ DECL_PURE_P (node->decl) = true;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
info->changed = true;
}
else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
&& !info->looping)
{
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
info->changed = true;
}
}
{
if (DECL_PURE_P (node->decl))
{
- DECL_PURE_P (node->decl) = false;
- DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
+ DECL_PURE_P (node->decl) = false;
+ DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
info->changed = true;
}
}
/* If function is not being inlined, we care only about
references outside of the comdat group. */
if (!will_inline)
- for (int i = 0; next->iterate_referring (i, ref); i++)
+ for (int i = 0; next->iterate_referring (i, ref); i++)
if (ref->referring->get_comdat_group () != get_comdat_group ())
return false;
}
if (avail > AVAIL_INTERPOSABLE)
for (cs = node->callers; cs != NULL; cs = cs->next_caller)
if (!cs->indirect_inlining_edge
- && !cs->caller->thunk.thunk_p)
- redirect_callers->safe_push (cs);
+ && !cs->caller->thunk)
+ redirect_callers->safe_push (cs);
return false;
}
/* Collect all callers of cgraph_node and its aliases that are known to lead to
cgraph_node (i.e. are not overwritable). */
-vec<cgraph_edge *>
+auto_vec<cgraph_edge *>
cgraph_node::collect_callers (void)
{
- vec<cgraph_edge *> redirect_callers = vNULL;
+ auto_vec<cgraph_edge *> redirect_callers;
call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
&redirect_callers, false);
return redirect_callers;
|| node2->former_clone_of == node->decl)
return true;
- if (!node->thunk.thunk_p && !node->former_thunk_p ())
+ if (!node->thunk && !node->former_thunk_p ())
{
- while (node2 && node->decl != node2->decl)
+ while (node2
+ && node->decl != node2->decl
+ && node->decl != node2->former_clone_of)
node2 = node2->clone_of;
return node2 != NULL;
}
/* There are no virtual clones of thunks so check former_clone_of or if we
might have skipped thunks because this adjustments are no longer
necessary. */
- while (node->thunk.thunk_p || node->former_thunk_p ())
+ while (node->thunk || node->former_thunk_p ())
{
- if (!node->thunk.this_adjusting)
+ if (!thunk_info::get (node)->this_adjusting)
return false;
/* In case of instrumented expanded thunks, which can have multiple calls
in them, we do not know how to continue and just have to be
- optimistic. */
- if (node->callees->next_callee)
+ optimistic. The same applies if all calls have already been inlined
+ into the thunk. */
+ if (!node->callees || node->callees->next_callee)
return true;
node = node->callees->callee->ultimate_alias_target ();
- if (!node2->clone.param_adjustments
- || node2->clone.param_adjustments->first_param_intact_p ())
+ clone_info *info = clone_info::get (node2);
+ if (!info || !info->param_adjustments
+ || info->param_adjustments->first_param_intact_p ())
return false;
- if (node2->former_clone_of == node->decl)
+ if (node2->former_clone_of == node->decl
+ || node2->former_clone_of == node->former_clone_of)
return true;
cgraph_node *n2 = node2;
if (callees)
{
error ("Alias has call edges");
- error_found = true;
+ error_found = true;
}
for (i = 0; iterate_reference (i, ref); i++)
if (ref->use != IPA_REF_ALIAS)
}
}
- if (analyzed && thunk.thunk_p)
+ if (analyzed && thunk)
{
if (!callees)
{
error ("No edge out of thunk node");
- error_found = true;
+ error_found = true;
}
else if (callees->next_callee)
{
error ("More than one edge out of thunk node");
- error_found = true;
+ error_found = true;
}
if (gimple_has_body_p (decl) && !inlined_to)
- {
+ {
error ("Thunk is not supposed to have body");
- error_found = true;
- }
+ error_found = true;
+ }
}
else if (analyzed && gimple_has_body_p (decl)
&& !TREE_ASM_WRITTEN (decl)
}
}
- if (nested != NULL)
+ if (nested_function_info *info = nested_function_info::get (this))
{
- for (cgraph_node *n = nested; n != NULL; n = n->next_nested)
+ if (info->nested != NULL)
{
- if (n->origin == NULL)
- {
- error ("missing origin for a node in a nested list");
- error_found = true;
- }
- else if (n->origin != this)
+ for (cgraph_node *n = info->nested; n != NULL;
+ n = next_nested_function (n))
{
- error ("origin points to a different parent");
- error_found = true;
- break;
+ nested_function_info *ninfo = nested_function_info::get (n);
+ if (ninfo->origin == NULL)
+ {
+ error ("missing origin for a node in a nested list");
+ error_found = true;
+ }
+ else if (ninfo->origin != this)
+ {
+ error ("origin points to a different parent");
+ error_found = true;
+ break;
+ }
}
}
- }
- if (next_nested != NULL && origin == NULL)
- {
- error ("missing origin for a node in a nested list");
- error_found = true;
+ if (info->next_nested != NULL && info->origin == NULL)
+ {
+ error ("missing origin for a node in a nested list");
+ error_found = true;
+ }
}
if (error_found)
{
cgraph_node *node = ultimate_alias_target (availability, ref);
- while (node->thunk.thunk_p)
+ while (node->thunk)
{
enum availability a;
{
cgraph_node *node = ultimate_alias_target (availability, ref);
- while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
+ while (node->thunk && !thunk_info::get (node)->virtual_offset_p)
{
enum availability a;
}
/* When doing LTO, read cgraph_node's body from disk if it is not already
- present. */
+ present. Also perform any necessary clone materializations. */
bool
-cgraph_node::get_untransformed_body (void)
+cgraph_node::get_untransformed_body ()
{
lto_file_decl_data *file_data;
const char *data, *name;
size_t len;
tree decl = this->decl;
+ /* See if there is clone to be materialized.
+ (inline clones does not need materialization, but we can be seeing
+ an inline clone of real clone). */
+ cgraph_node *p = this;
+ for (cgraph_node *c = clone_of; c; c = c->clone_of)
+ {
+ if (c->decl != decl)
+ p->materialize_clone ();
+ p = c;
+ }
+
/* Check if body is already there. Either we have gimple body or
the function is thunk and in that case we set DECL_ARGUMENTS. */
if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
void
cgraph_c_finalize (void)
{
+ nested_function_info::release ();
+ thunk_info::release ();
+ clone_info::release ();
symtab = NULL;
x_cgraph_nodes_queue = NULL;
cgraph_node::has_thunk_p (cgraph_node *node, void *)
{
for (cgraph_edge *e = node->callers; e; e = e->next_caller)
- if (e->caller->thunk.thunk_p)
+ if (e->caller->thunk)
return true;
return false;
}
return false;
}
+/* Return true if this node represents a former, i.e. an expanded, thunk.
+   A node is a former thunk when it is no longer flagged as a thunk but
+   still carries a thunk_info summary left over from expansion; the
+   summary must describe a non-trivial adjustment (nonzero fixed offset,
+   virtual offset, or indirect offset), which the assert checks.  */
+
+bool
+cgraph_node::former_thunk_p (void)
+{
+ /* Still an active thunk, not a former one.  */
+ if (thunk)
+ return false;
+ /* No thunk_info summary attached: the node was never a thunk.  */
+ thunk_info *i = thunk_info::get (this);
+ if (!i)
+ return false;
+ gcc_checking_assert (i->fixed_offset || i->virtual_offset_p
+ || i->indirect_offset);
+ return true;
+}
+
/* A stashed copy of "symtab" for use by selftest::symbol_table_test.
This needs to be a global so that it can be a GC root, and thus
prevent the stashed copy from being garbage-collected if the GC runs