symtab_node::get_dump_name uses the node's order to identify nodes.
Order is no longer unique because of the Incremental LTO patches.
This patch moves uid from cgraph_node to symtab_node, so
get_dump_name can use the uid instead and dump names become unique
again.  Note that uids start at 1 while order started at 0, hence
the off-by-one changes to the expected dump ids below.

In the inlining passes, the uid is replaced with the summary id,
which is better suited for indexing the updated_nodes bitmaps:
summary ids are recycled, so they stay compact.
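
As a minimal standalone sketch of that indexing argument (hypothetical
code, not part of the patch; the id_allocator type is invented for the
example): uids are handed out monotonically and never reused, so a
bitmap keyed by uid grows with the total number of nodes ever created,
while recycled summary ids keep such bitmaps dense.

#include <cstdio>
#include <vector>

struct id_allocator
{
  int next_uid = 1;		      /* never reused; first uid is 1 */
  int next_summary_id = 0;
  std::vector<int> free_summary_ids;  /* filled when a node is removed */

  int alloc_uid () { return next_uid++; }

  int alloc_summary_id ()
  {
    if (!free_summary_ids.empty ())
      {
	int id = free_summary_ids.back ();
	free_summary_ids.pop_back ();
	return id;
      }
    return next_summary_id++;
  }

  void release_summary_id (int id) { free_summary_ids.push_back (id); }
};

int
main ()
{
  id_allocator ids;
  /* Simulate creating and removing 1000 short-lived nodes.  */
  for (int i = 0; i < 1000; i++)
    {
      ids.alloc_uid ();
      ids.release_summary_id (ids.alloc_summary_id ());
    }
  /* A bitmap indexed by uid would need ~1000 bits by now;
     indexed by summary id it needs a single bit.  */
  std::printf ("uids used: %d, summary ids used: %d\n",
	       ids.next_uid - 1, ids.next_summary_id);
  return 0;
}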
Bootstrapped/regtested on x86_64-linux.
Ok for trunk?
gcc/ChangeLog:

	* cgraph.cc (symbol_table::create_empty): Move uid to symtab_node.
	(test_symbol_table_test): Change expected dump id.
	* cgraph.h (struct cgraph_node): Move uid to symtab_node.
	(symbol_table::register_symbol): Likewise.
	* dumpfile.cc (test_capture_of_dump_calls): Change expected dump id.
	* ipa-inline.cc (update_caller_keys): Use summary id instead of uid.
	(update_callee_keys): Likewise.
	* symtab.cc (symtab_node::get_dump_name): Use uid instead of order.

gcc/testsuite/ChangeLog:

	* gcc.dg/live-patching-1.c: Change expected dump id.
	* gcc.dg/live-patching-4.c: Likewise.
symbol_table::create_empty (void)
{
cgraph_count++;
- return new (ggc_alloc<cgraph_node> ()) cgraph_node (cgraph_max_uid++);
+ return new (ggc_alloc<cgraph_node> ()) cgraph_node ();
}
/* Register HOOK to be called with DATA on each removed edge. */
/* Verify that the node has order 0 on both iterations,
and thus that nodes have predictable dump names in selftests. */
ASSERT_EQ (node->order, 0);
- ASSERT_STREQ (node->dump_name (), "test_decl/0");
+ ASSERT_STREQ (node->dump_name (), "test_decl/1");
}
}
order (-1), next_sharing_asm_name (NULL),
previous_sharing_asm_name (NULL), same_comdat_group (NULL), ref_list (),
alias_target (NULL), lto_file_data (NULL), aux (NULL),
- x_comdat_group (NULL_TREE), x_section (NULL)
+ x_comdat_group (NULL_TREE), x_section (NULL), m_uid (-1)
{}
/* Return name. */
/* Perform internal consistency checks, if they are enabled. */
static inline void checking_verify_symtab_nodes (void);
+ /* Get unique identifier of the node. */
+ inline int get_uid ()
+ {
+ return m_uid;
+ }
+
/* Type of the symbol. */
ENUM_BITFIELD (symtab_type) type : 8;
void *data,
bool include_overwrite);
private:
+ /* Unique id of the node. */
+ int m_uid;
+
/* Workers for set_section. */
static bool set_section_from_string (symtab_node *n, void *s);
static bool set_section_from_node (symtab_node *n, void *o);
friend class symbol_table;
/* Constructor. */
- explicit cgraph_node (int uid)
+ explicit cgraph_node ()
: symtab_node (SYMTAB_FUNCTION), callees (NULL), callers (NULL),
indirect_calls (NULL),
next_sibling_clone (NULL), prev_sibling_clone (NULL), clones (NULL),
redefined_extern_inline (false), tm_may_enter_irr (false),
ipcp_clone (false), gc_candidate (false),
called_by_ifunc_resolver (false), has_omp_variant_constructs (false),
- m_uid (uid), m_summary_id (-1)
+ m_summary_id (-1)
{}
/* Remove the node from cgraph and all inline clones inlined into it.
dump_cgraph (stderr);
}
- /* Get unique identifier of the node. */
- inline int get_uid ()
- {
- return m_uid;
- }
-
/* Get summary id of the node. */
inline int get_summary_id ()
{
unsigned has_omp_variant_constructs : 1;
private:
- /* Unique id of the node. */
- int m_uid;
/* Summary id that is recycled. */
int m_summary_id;
nodes->previous = node;
nodes = node;
+ nodes->m_uid = cgraph_max_uid++;
+
if (node->order == -1)
node->order = order++;
}
dump_printf (MSG_NOTE, "node: %C", node);
const int expected_impl_line = __LINE__ - 1;
- ASSERT_DUMPED_TEXT_EQ (tmp, "node: test_decl/0");
+ ASSERT_DUMPED_TEXT_EQ (tmp, "node: test_decl/1");
if (with_optinfo)
{
optinfo *info = tmp.get_pending_optinfo ();
ASSERT_EQ (info->get_kind (), OPTINFO_KIND_NOTE);
ASSERT_EQ (info->num_items (), 2);
ASSERT_IS_TEXT (info->get_item (0), "node: ");
- ASSERT_IS_SYMTAB_NODE (info->get_item (1), decl_loc, "test_decl/0");
+ ASSERT_IS_SYMTAB_NODE (info->get_item (1), decl_loc, "test_decl/1");
ASSERT_IMPL_LOCATION_EQ (info->get_impl_location (),
"dumpfile.cc", expected_impl_line,
"test_capture_of_dump_calls");
dump_symtab_node (MSG_NOTE, node);
const int expected_impl_line = __LINE__ - 1;
- ASSERT_DUMPED_TEXT_EQ (tmp, "test_decl/0");
+ ASSERT_DUMPED_TEXT_EQ (tmp, "test_decl/1");
if (with_optinfo)
{
optinfo *info = tmp.get_pending_optinfo ();
ASSERT_TRUE (info != NULL);
ASSERT_EQ (info->get_kind (), OPTINFO_KIND_NOTE);
ASSERT_EQ (info->num_items (), 1);
- ASSERT_IS_SYMTAB_NODE (info->get_item (0), decl_loc, "test_decl/0");
+ ASSERT_IS_SYMTAB_NODE (info->get_item (0), decl_loc, "test_decl/1");
ASSERT_IMPL_LOCATION_EQ (info->get_impl_location (),
"dumpfile.cc", expected_impl_line,
"test_capture_of_dump_calls");
if ((!node->alias && !ipa_fn_summaries->get (node)->inlinable)
|| node->inlined_to)
return;
- if (!bitmap_set_bit (updated_nodes, node->get_uid ()))
+ if (!bitmap_set_bit (updated_nodes, node->get_summary_id ()))
return;
FOR_EACH_ALIAS (node, ref)
if (e->aux
&& !bitmap_bit_p (updated_nodes,
e->callee->ultimate_alias_target
- (&avail, e->caller)->get_uid ()))
+ (&avail, e->caller)->get_summary_id ()))
update_edge_key (heap, e);
}
/* We do not reset callee growth cache here. Since we added a new call,
&& avail >= AVAIL_AVAILABLE
&& ipa_fn_summaries->get (callee) != NULL
&& ipa_fn_summaries->get (callee)->inlinable
- && !bitmap_bit_p (updated_nodes, callee->get_uid ()))
+ && !bitmap_bit_p (updated_nodes, callee->get_summary_id ()))
{
if (can_inline_edge_p (e, false)
&& want_inline_small_function_p (e, false)
unsigned l = strlen (fname);
char *s = (char *)ggc_internal_cleared_alloc (l + EXTRA);
- snprintf (s, l + EXTRA, "%s/%d", fname, order);
+ snprintf (s, l + EXTRA, "%s/%d", fname, m_uid);
return s;
}
return 0;
}
-/* { dg-final { scan-ipa-dump "foo/0 function has external linkage when the user requests only inlining static for live patching" "inline" } } */
+/* { dg-final { scan-ipa-dump "foo/1 function has external linkage when the user requests only inlining static for live patching" "inline" } } */
return 0;
}
-/* { dg-final { scan-tree-dump "Inlining foo/0 into main/2" "einline" } } */
+/* { dg-final { scan-tree-dump "Inlining foo/1 into main/3" "einline" } } */