+2014-07-24 Martin Liska <mliska@suse.cz>
+
+ * cgraph.h (symtab_node):
+ (void register_symbol (void)): created from symtab_register_node
+ (void remove (void)): created from symtab_remove_node
+ (void dump (FILE *f)): created from dump_symtab_node
+ (void DEBUG_FUNCTION debug (void)): created from debug_symtab_node
+ (void DEBUG_FUNCTION verify (void)): created from verify_symtab_node
+ (struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type)): created from add_reference
+ (struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type, gimple stmt)): created from add_reference
+ (struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
+ gimple stmt)): created from maybe_add_reference
+ (bool semantically_equivalent_p (symtab_node *target)): created from
+ symtab_semantically_equivalent_p
+ (void remove_from_same_comdat_group (void)): created from
+ remove_from_same_comdat_group
+ (void add_to_same_comdat_group (symtab_node *old_node)): created from
+ symtab_add_to_same_comdat_group
+ (void dissolve_same_comdat_group_list (void)): created from
+ symtab_dissolve_same_comdat_group_list
+ (bool used_from_object_file_p (void)): created from symtab_used_from_object_file_p
+ (symtab_node *ultimate_alias_target (enum availability *avail = NULL)):
+ created from symtab_alias_ultimate_target
+ (inline symtab_node *next_defined_symbol (void)): created from
+ symtab_next_defined_symbol
+ (bool resolve_alias (symtab_node *target)): created from
+ symtab_resolve_alias
+ (bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
+ void *data, bool include_overwrite)): created from symtab_for_node_and_aliases
+ (symtab_node *noninterposable_alias (void)): created from symtab_nonoverwritable_alias
+ (inline symtab_node *get_alias_target (void)): created from symtab_alias_target
+ (void set_section (const char *section)): created from set_section_1
+ (enum availability get_availability (void)): created from symtab_node_availability
+ (void make_decl_local (void)): created from symtab_make_decl_local
+ (bool real_symbol_p (void)): created from symtab_real_symbol_p
+ (bool can_be_discarded_p (void)): created from symtab_can_be_discarded
+ (inline bool comdat_local_p (void)): created from symtab_comdat_local_p
+ (inline bool in_same_comdat_group_p (symtab_node *target)): created from
+ symtab_in_same_comdat_p
+ (bool address_taken_from_non_vtable_p (void)): created from
+ address_taken_from_non_vtable_p
+ (static inline symtab_node *get (const_tree decl)): created from symtab_get_node
+ (static void dump_table (FILE *)): created from dump_symtab
+ (static inline DEBUG_FUNCTION void debug_symtab (void)): created from debug_symtab
+ (static DEBUG_FUNCTION void verify_symtab_nodes (void)): created from verify_symtab
+ (static bool used_from_object_file_p_worker (symtab_node *node)): created from
+ symtab_used_from_object_file_p
+ (void dump_base (FILE *)): created from dump_symtab_base
+ (bool DEBUG_FUNCTION verify_base (void)): created from verify_symtab_base
+ (void unregister (void)): created from symtab_unregister_node
+ (struct symbol_priority_map *priority_info (void)): created from symtab_priority_info
+ (static bool set_implicit_section (symtab_node *n, void *)): created from set_implicit_section
+ (static bool noninterposable_alias (symtab_node *node, void *data)): created from
+ symtab_nonoverwritable_alias_1
+ * cgraph.h (cgraph_node):
+ (bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL)):
+ created from cgraph_remove_node_and_inline_clones
+ (void record_stmt_references (gimple stmt)): created from ipa_record_stmt_references
+ (void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+ bool update_speculative = true)): created from cgraph_set_call_stmt_including_clones
+ (cgraph_node *function_symbol (enum availability *avail = NULL)):
+ created from cgraph_function_node
+ (cgraph_node *create_clone (tree decl, gcov_type count, int freq, bool update_original,
+ vec<cgraph_edge *> redirect_callers, bool call_duplication_hook,
+ struct cgraph_node *new_inlined_to, bitmap args_to_skip)):
+ created from cgraph_create_clone
+ (cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, const char * suffix)):
+ created from cgraph_create_virtual_clone
+ (cgraph_node *find_replacement (void)): created from cgraph_find_replacement_node
+ (cgraph_node *create_version_clone (tree new_decl, vec<cgraph_edge *> redirect_callers,
+ bitmap bbs_to_copy)): created from cgraph_copy_node_for_versioning
+ (cgraph_node *create_version_clone_with_body (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip, bool skip_return,
+ bitmap bbs_to_copy, basic_block new_entry_block, const char *clone_name)):
+ created from cgraph_function_versioning
+ (struct cgraph_function_version_info *insert_new_function_version (void)):
+ created from insert_new_cgraph_node_version
+ (struct cgraph_function_version_info *function_version (void)): created from
+ get_cgraph_node_version
+ (void analyze (void)): created from analyze_function
+ (cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value, tree virtual_offset,
+ tree real_alias)): created from cgraph_add_thunk
+ (inline cgraph_node *get_alias_target (void)): created from cgraph_alias_target
+ (cgraph_node *ultimate_alias_target (availability *availability = NULL)):
+ created from cgraph_function_or_thunk_node
+ (bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)):
+ created from expand_thunk
+ (void reset (void)): created from cgraph_reset_node
+ (void create_wrapper (cgraph_node *target)): created from cgraph_make_wrapper
+ (void DEBUG_FUNCTION verify_node (void)): created from verify_cgraph_node
+ (void remove (void)): created from cgraph_remove_node
+ (void dump (FILE *f)): created from dump_cgraph_node
+ (void DEBUG_FUNCTION debug (void)): created from debug_cgraph_node
+ (bool get_body (void)): created from cgraph_get_body
+ (void release_body (void)): created from cgraph_release_function_body
+ (void unnest (void)): created from cgraph_unnest_node
+ (void make_local (void)): created from cgraph_make_node_local
+ (void mark_address_taken (void)): created from cgraph_mark_address_taken_node
+ (struct cgraph_edge *create_edge (cgraph_node *callee, gimple call_stmt,
+ gcov_type count, int freq)): created from cgraph_create_edge
+ (struct cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
+ gcov_type count, int freq)): created from cgraph_create_indirect_edge
+ (void create_edge_including_clones (struct cgraph_node *callee, gimple old_stmt,
+ gimple stmt, gcov_type count, int freq, cgraph_inline_failed_t reason)):
+ created from cgraph_create_edge_including_clones
+ (cgraph_edge *get_edge (gimple call_stmt)): created from cgraph_edge
+ (vec<cgraph_edge *> collect_callers (void)): created from collect_callers_of_node
+ (void remove_callers (void)): created from cgraph_node_remove_callers
+ (void remove_callees (void)): created from cgraph_node_remove_callees
+ (enum availability get_availability (void)): created from cgraph_function_body_availability
+ (void set_nothrow_flag (bool nothrow)): created from cgraph_set_nothrow_flag
+ (void set_const_flag (bool readonly, bool looping)): created from cgraph_set_const_flag
+ (void set_pure_flag (bool pure, bool looping)): created from cgraph_set_pure_flag
+ (void call_duplication_hooks (cgraph_node *node2)): created from
+ cgraph_call_node_duplication_hooks
+ (bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *, void *),
+ void *data, bool include_overwritable)): created from cgraph_for_node_and_aliases
+ (bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node, void *data),
+ void *data, bool include_overwritable)): created from cgraph_for_node_thunks_and_aliases
+ (void call_function_insertion_hooks (void)):
+ created from cgraph_call_function_insertion_hooks
+ (inline void mark_force_output (void)): created from cgraph_mark_force_output_node
+ (bool local_p (void)): created from cgraph_local_node
+ (bool can_be_local_p (void)): created from cgraph_node_can_be_local_p
+ (bool cannot_return_p (void)): created from cgraph_node_cannot_return
+ (bool only_called_directly_p (void)): created from cgraph_only_called_directly_p
+ (inline bool only_called_directly_or_aliased_p (void)):
+ created from cgraph_only_called_directly_or_aliased_p
+ (bool will_be_removed_from_program_if_no_direct_calls_p (void)):
+ created from cgraph_will_be_removed_from_program_if_no_direct_calls
+ (bool can_remove_if_no_direct_calls_and_refs_p (void)):
+ created from cgraph_can_remove_if_no_direct_calls_and_refs_p
+ (bool can_remove_if_no_direct_calls_p (void)):
+ created from cgraph_can_remove_if_no_direct_calls_p
+ (inline bool has_gimple_body_p (void)):
+ created from cgraph_function_with_gimple_body_p
+ (bool optimize_for_size_p (void)): created from cgraph_optimize_for_size_p
+ (static void dump_cgraph (FILE *f)): created from dump_cgraph
+ (static inline void debug_cgraph (void)): created from debug_cgraph
+ (static void record_function_versions (tree decl1, tree decl2)):
+ created from record_function_versions
+ (static void delete_function_version (tree decl)):
+ created from delete_function_version
+ (static void add_new_function (tree fndecl, bool lowered)):
+ created from cgraph_add_new_function
+ (static inline cgraph_node *get (const_tree decl)): created from cgraph_get_node
+ (static cgraph_node * create (tree decl)): created from cgraph_create_node
+ (static cgraph_node * create_empty (void)): created from cgraph_create_empty_node
+ (static cgraph_node * get_create (tree)): created from cgraph_get_create_node
+ (static cgraph_node *get_for_asmname (tree asmname)):
+ created from cgraph_node_for_asm
+ (static cgraph_node * create_same_body_alias (tree alias, tree decl)):
+ created from cgraph_same_body_alias
+ (static bool used_from_object_file_p_worker (cgraph_node *node,
+ void *)): new function
+ (static bool non_local_p (cgraph_node *node, void *)):
+ created from cgraph_non_local_node_p_1
+ (static void DEBUG_FUNCTION verify_cgraph_nodes (void)):
+ created from verify_cgraph
+ (static bool make_local (cgraph_node *node, void *)):
+ created from cgraph_make_node_local
+ (static cgraph_node *create_alias (tree alias, tree target)):
+ created from cgraph_create_function_alias
+ (static cgraph_edge * create_edge (cgraph_node *caller, cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq, bool indir_unknown_callee)):
+ created from cgraph_create_edge_1
+ * cgraph.h (varpool_node):
+ (void remove (void)): created from varpool_remove_node
+ (void dump (FILE *f)): created from dump_varpool_node
+
2014-07-24 Richard Biener <rguenther@suse.de>
PR ipa/61823
to be an array of such vars, putting padding in there
breaks this assumption. */
|| (DECL_SECTION_NAME (decl) != NULL
- && !symtab_get_node (decl)->implicit_section)
+ && !symtab_node::get (decl)->implicit_section)
|| DECL_SIZE (decl) == 0
|| ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
|| !valid_constant_size_p (DECL_SIZE_UNIT (decl))
}
/* Dump all nested functions now. */
- cgn = cgraph_get_create_node (fndecl);
+ cgn = cgraph_node::get_create (fndecl);
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
c_genericize (cgn->decl);
}
gcc_assert (cfun->decl == outer);
push_cfun (f);
- cgraph_create_node (fndecl);
+ cgraph_node::create (fndecl);
pop_cfun_to (outer);
}
if (TREE_CODE (newdecl) == FUNCTION_DECL
|| TREE_CODE (newdecl) == VAR_DECL)
{
- struct symtab_node *snode = symtab_get_node (newdecl);
+ struct symtab_node *snode = symtab_node::get (newdecl);
if (snode)
- symtab_remove_node (snode);
+ snode->remove ();
}
ggc_free (newdecl);
return true;
This should be cleaned up later and this conditional removed. */
if (cgraph_global_info_ready)
{
- cgraph_add_new_function (fndecl, false);
+ cgraph_node::add_new_function (fndecl, false);
return;
}
cgraph_finalize_function (fndecl, false);
/* Register this function with cgraph just far enough to get it
added to our parent's nested function list. Handy, since the
C front end doesn't have such a list. */
- (void) cgraph_get_create_node (fndecl);
+ (void) cgraph_node::get_create (fndecl);
}
}
/* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
#include "tree-pass.h"
-static void cgraph_node_remove_callers (struct cgraph_node *node);
static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
/* Get the cgraph_function_version_info node corresponding to node. */
struct cgraph_function_version_info *
-get_cgraph_node_version (struct cgraph_node *node)
+cgraph_node::function_version (void)
{
struct cgraph_function_version_info *ret;
struct cgraph_function_version_info key;
- key.this_node = node;
+ key.this_node = this;
if (cgraph_fnver_htab == NULL)
return NULL;
/* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
corresponding to cgraph_node NODE. */
struct cgraph_function_version_info *
-insert_new_cgraph_node_version (struct cgraph_node *node)
+cgraph_node::insert_new_function_version (void)
{
void **slot;
version_info_node = NULL;
version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
- version_info_node->this_node = node;
+ version_info_node->this_node = this;
if (cgraph_fnver_htab == NULL)
cgraph_fnver_htab = htab_create_ggc (2, cgraph_fnver_htab_hash,
/* Remove the cgraph_function_version_info and cgraph_node for DECL. This
DECL is a duplicate declaration. */
void
-delete_function_version (tree decl)
+cgraph_node::delete_function_version (tree decl)
{
- struct cgraph_node *decl_node = cgraph_get_node (decl);
+ struct cgraph_node *decl_node = cgraph_node::get (decl);
struct cgraph_function_version_info *decl_v = NULL;
if (decl_node == NULL)
return;
- decl_v = get_cgraph_node_version (decl_node);
+ decl_v = decl_node->function_version ();
if (decl_v == NULL)
return;
if (cgraph_fnver_htab != NULL)
htab_remove_elt (cgraph_fnver_htab, decl_v);
- cgraph_remove_node (decl_node);
+ decl_node->remove ();
}
/* Record that DECL1 and DECL2 are semantically identical function
versions. */
void
-record_function_versions (tree decl1, tree decl2)
+cgraph_node::record_function_versions (tree decl1, tree decl2)
{
- struct cgraph_node *decl1_node = cgraph_get_create_node (decl1);
- struct cgraph_node *decl2_node = cgraph_get_create_node (decl2);
+ struct cgraph_node *decl1_node = cgraph_node::get_create (decl1);
+ struct cgraph_node *decl2_node = cgraph_node::get_create (decl2);
struct cgraph_function_version_info *decl1_v = NULL;
struct cgraph_function_version_info *decl2_v = NULL;
struct cgraph_function_version_info *before;
struct cgraph_function_version_info *after;
gcc_assert (decl1_node != NULL && decl2_node != NULL);
- decl1_v = get_cgraph_node_version (decl1_node);
- decl2_v = get_cgraph_node_version (decl2_node);
+ decl1_v = decl1_node->function_version ();
+ decl2_v = decl2_node->function_version ();
if (decl1_v != NULL && decl2_v != NULL)
return;
if (decl1_v == NULL)
- decl1_v = insert_new_cgraph_node_version (decl1_node);
+ decl1_v = decl1_node->insert_new_function_version ();
if (decl2_v == NULL)
- decl2_v = insert_new_cgraph_node_version (decl2_node);
+ decl2_v = decl2_node->insert_new_function_version ();
/* Chain decl2_v and decl1_v. All semantically identical versions
will be chained together. */
/* Macros to access the next item in the list of free cgraph nodes and
edges. */
-#define NEXT_FREE_NODE(NODE) cgraph ((NODE)->next)
+#define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
#define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
#define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
/* Call all node insertion hooks. */
void
-cgraph_call_function_insertion_hooks (struct cgraph_node *node)
+cgraph_node::call_function_insertion_hooks (void)
{
struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
while (entry)
{
- entry->hook (node, entry->data);
+ entry->hook (this, entry->data);
entry = entry->next;
}
}
/* Call all node duplication hooks. */
void
-cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
- struct cgraph_node *node2)
+cgraph_node::call_duplication_hooks (struct cgraph_node *node2)
{
struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
while (entry)
{
- entry->hook (node1, node2, entry->data);
+ entry->hook (this, node2, entry->data);
entry = entry->next;
}
}
/* Allocate new callgraph node and insert it into basic data structures. */
-struct cgraph_node *
-cgraph_create_empty_node (void)
+cgraph_node *
+cgraph_node::create_empty (void)
{
struct cgraph_node *node = cgraph_allocate_node ();
/* Return cgraph node assigned to DECL. Create new one when needed. */
-struct cgraph_node *
-cgraph_create_node (tree decl)
+cgraph_node *
+cgraph_node::create (tree decl)
{
- struct cgraph_node *node = cgraph_create_empty_node ();
+ struct cgraph_node *node = cgraph_node::create_empty ();
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
node->decl = decl;
- symtab_register_node (node);
+ node->register_symbol ();
if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
{
- node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
+ node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
node->next_nested = node->origin->nested;
node->origin->nested = node;
}
/* Try to find a call graph node for declaration DECL and if it does not exist
or if it corresponds to an inline clone, create a new one. */
-struct cgraph_node *
-cgraph_get_create_node (tree decl)
+cgraph_node *
+cgraph_node::get_create (tree decl)
{
- struct cgraph_node *first_clone = cgraph_get_node (decl);
+ struct cgraph_node *first_clone = cgraph_node::get (decl);
if (first_clone && !first_clone->global.inlined_to)
return first_clone;
- struct cgraph_node *node = cgraph_create_node (decl);
+ struct cgraph_node *node = cgraph_node::create (decl);
if (first_clone)
{
first_clone->clone_of = node;
/* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
the function body is associated with (not necessarily cgraph_node (DECL). */
-struct cgraph_node *
-cgraph_create_function_alias (tree alias, tree target)
+cgraph_node *
+cgraph_node::create_alias (tree alias, tree target)
{
- struct cgraph_node *alias_node;
+ cgraph_node *alias_node;
gcc_assert (TREE_CODE (target) == FUNCTION_DECL
|| TREE_CODE (target) == IDENTIFIER_NODE);
gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
- alias_node = cgraph_get_create_node (alias);
+ alias_node = cgraph_node::get_create (alias);
gcc_assert (!alias_node->definition);
alias_node->alias_target = target;
alias_node->definition = true;
/* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
and NULL otherwise.
Same body aliases are output whenever the body of DECL is output,
- and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
+ and cgraph_node::get (ALIAS) transparently returns
+ cgraph_node::get (DECL). */
struct cgraph_node *
-cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
+cgraph_node::create_same_body_alias (tree alias, tree decl)
{
struct cgraph_node *n;
#ifndef ASM_OUTPUT_DEF
if (cgraph_global_info_ready)
return NULL;
- n = cgraph_create_function_alias (alias, decl);
+ n = cgraph_node::create_alias (alias, decl);
n->cpp_implicit_alias = true;
if (cpp_implicit_aliases_done)
- symtab_resolve_alias (n,
- cgraph_get_node (decl));
+ n->resolve_alias (cgraph_node::get (decl));
return n;
}
See comments in thunk_adjust for detail on the parameters. */
struct cgraph_node *
-cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
- tree alias, tree decl ATTRIBUTE_UNUSED,
- bool this_adjusting,
- HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
- tree virtual_offset,
- tree real_alias)
+cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset,
+ HOST_WIDE_INT virtual_value,
+ tree virtual_offset,
+ tree real_alias)
{
struct cgraph_node *node;
- node = cgraph_get_node (alias);
+ node = cgraph_node::get (alias);
if (node)
- cgraph_reset_node (node);
+ node->reset ();
else
- node = cgraph_create_node (alias);
+ node = cgraph_node::create (alias);
gcc_checking_assert (!virtual_offset
|| wi::eq_p (virtual_offset, virtual_value));
node->thunk.fixed_offset = fixed_offset;
/* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
Return NULL if there's no such node. */
-struct cgraph_node *
-cgraph_node_for_asm (tree asmname)
+cgraph_node *
+cgraph_node::get_for_asmname (tree asmname)
{
/* We do not want to look at inline clones. */
for (symtab_node *node = symtab_node_for_asm (asmname);
/* Return the callgraph edge representing the GIMPLE_CALL statement
CALL_STMT. */
-struct cgraph_edge *
-cgraph_edge (struct cgraph_node *node, gimple call_stmt)
+cgraph_edge *
+cgraph_node::get_edge (gimple call_stmt)
{
struct cgraph_edge *e, *e2;
int n = 0;
- if (node->call_site_hash)
+ if (call_site_hash)
return (struct cgraph_edge *)
- htab_find_with_hash (node->call_site_hash, call_stmt,
+ htab_find_with_hash (call_site_hash, call_stmt,
htab_hash_pointer (call_stmt));
/* This loop may turn out to be performance problem. In such case adding
solution. It is not good idea to add pointer into CALL_EXPR itself
because we want to make possible having multiple cgraph nodes representing
different clones of the same body before the body is actually cloned. */
- for (e = node->callees; e; e = e->next_callee)
+ for (e = callees; e; e = e->next_callee)
{
if (e->call_stmt == call_stmt)
break;
}
if (!e)
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
{
if (e->call_stmt == call_stmt)
break;
if (n > 100)
{
- node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
- for (e2 = node->callees; e2; e2 = e2->next_callee)
+ call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
+ for (e2 = callees; e2; e2 = e2->next_callee)
cgraph_add_edge_to_call_site_hash (e2);
- for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
+ for (e2 = indirect_calls; e2; e2 = e2->next_callee)
cgraph_add_edge_to_call_site_hash (e2);
}
{
/* Constant propagation (and possibly also inlining?) can turn an
indirect call into a direct one. */
- struct cgraph_node *new_callee = cgraph_get_node (decl);
+ struct cgraph_node *new_callee = cgraph_node::get (decl);
gcc_checking_assert (new_callee);
e = cgraph_make_edge_direct (e, new_callee);
parameters of which only CALLEE can be NULL (when creating an indirect call
edge). */
-static struct cgraph_edge *
-cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq,
- bool indir_unknown_callee)
+cgraph_edge *
+cgraph_node::create_edge (cgraph_node *caller, cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq,
+ bool indir_unknown_callee)
{
- struct cgraph_edge *edge;
+ cgraph_edge *edge;
/* LTO does not actually have access to the call_stmt since these
have not been loaded yet. */
construction of call stmt hashtable. */
#ifdef ENABLE_CHECKING
struct cgraph_edge *e;
- gcc_checking_assert (!(e=cgraph_edge (caller, call_stmt)) || e->speculative);
+ gcc_checking_assert (
+ !(e = caller->get_edge (call_stmt)) || e->speculative);
#endif
gcc_assert (is_gimple_call (call_stmt));
return edge;
}
-/* Create edge from CALLER to CALLEE in the cgraph. */
+/* Create edge from a given function to CALLEE in the cgraph. */
struct cgraph_edge *
-cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq)
+cgraph_node::create_edge (struct cgraph_node *callee,
+ gimple call_stmt, gcov_type count, int freq)
{
- struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
- count, freq, false);
+ cgraph_edge *edge = cgraph_node::create_edge (this, callee, call_stmt,
+ count, freq, false);
initialize_inline_failed (edge);
edge->next_caller = callee->callers;
if (callee->callers)
callee->callers->prev_caller = edge;
- edge->next_callee = caller->callees;
- if (caller->callees)
- caller->callees->prev_callee = edge;
- caller->callees = edge;
+ edge->next_callee = callees;
+ if (callees)
+ callees->prev_callee = edge;
+ callees = edge;
callee->callers = edge;
return edge;
PARAM_INDEX. */
struct cgraph_edge *
-cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
- int ecf_flags,
- gcov_type count, int freq)
+cgraph_node::create_indirect_edge (gimple call_stmt, int ecf_flags,
+ gcov_type count, int freq)
{
- struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
- count, freq, true);
+ struct cgraph_edge *edge = cgraph_node::create_edge (this, NULL, call_stmt,
+ count, freq, true);
tree target;
initialize_inline_failed (edge);
HOST_WIDE_INT otr_token;
ipa_polymorphic_call_context context;
- get_polymorphic_call_info (caller->decl,
+ get_polymorphic_call_info (decl,
target,
&otr_type, &otr_token,
&context, call_stmt);
edge->indirect_info->maybe_derived_type = context.maybe_derived_type;
}
- edge->next_callee = caller->indirect_calls;
- if (caller->indirect_calls)
- caller->indirect_calls->prev_callee = edge;
- caller->indirect_calls = edge;
+ edge->next_callee = indirect_calls;
+ if (indirect_calls)
+ indirect_calls->prev_callee = edge;
+ indirect_calls = edge;
return edge;
}
xstrdup (n2->name ()), n2->order);
}
e->speculative = true;
- e2 = cgraph_create_edge (n, n2, e->call_stmt, direct_count, direct_frequency);
+ e2 = n->create_edge (n2, e->call_stmt, direct_count, direct_frequency);
initialize_inline_failed (e2);
e2->speculative = true;
if (TREE_NOTHROW (n2->decl))
ref = n->add_reference (n2, IPA_REF_ADDR, e->call_stmt);
ref->lto_stmt_uid = e->lto_stmt_uid;
ref->speculative = e->speculative;
- cgraph_mark_address_taken_node (n2);
+ n2->mark_address_taken ();
return e2;
}
/* We can take advantage of the call stmt hash. */
if (e2->call_stmt)
{
- e = cgraph_edge (e->caller, e2->call_stmt);
+ e = e->caller->get_edge (e2->call_stmt);
gcc_assert (e->speculative && !e->indirect_unknown_callee);
}
else
gcc_assert (edge->speculative);
cgraph_speculative_call_info (edge, e2, edge, ref);
if (!callee_decl
- || !symtab_semantically_equivalent_p (ref->referred,
- symtab_get_node (callee_decl)))
+ || !ref->referred->semantically_equivalent_p
+ (symtab_node::get (callee_decl)))
{
if (dump_file)
{
if (e2->indirect_unknown_callee || e2->inline_failed)
cgraph_remove_edge (e2);
else
- cgraph_remove_node_and_inline_clones (e2->callee, NULL);
+ e2->callee->remove_symbol_and_inline_clones ();
if (edge->caller->call_site_hash)
cgraph_update_edge_in_call_site_hash (edge);
return edge;
(int64_t)e->count);
gcc_assert (e2->speculative);
push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
- new_stmt = gimple_ic (e->call_stmt, cgraph (ref->referred),
+ new_stmt = gimple_ic (e->call_stmt, dyn_cast<cgraph_node *> (ref->referred),
e->count || e2->count
? RDIV (e->count * REG_BR_PROB_BASE,
e->count + e2->count)
: REG_BR_PROB_BASE / 2,
e->count, e->count + e2->count);
e->speculative = false;
- cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt,
- new_stmt, false);
+ e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
+ false);
e->frequency = compute_call_stmt_bb_frequency
(e->caller->decl, gimple_bb (e->call_stmt));
e2->frequency = compute_call_stmt_bb_frequency
#ifdef ENABLE_CHECKING
if (decl)
{
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
gcc_assert (!node || !node->clone.combined_args_to_skip);
}
#endif
update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
}
- cgraph_set_call_stmt_including_clones (e->caller, e->call_stmt, new_stmt, false);
+ e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
if (cgraph_dump_file)
{
into different builtin. */
if (old_call != new_call)
{
- struct cgraph_edge *e = cgraph_edge (node, old_stmt);
+ struct cgraph_edge *e = node->get_edge (old_stmt);
struct cgraph_edge *ne = NULL;
gcov_type count;
int frequency;
if (e->indirect_unknown_callee || e->inline_failed)
cgraph_remove_edge (e);
else
- cgraph_remove_node_and_inline_clones (e->callee, NULL);
+ e->callee->remove_symbol_and_inline_clones ();
}
else if (new_call)
{
if (new_call)
{
- ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
- new_stmt, count, frequency);
+ ne = node->create_edge (cgraph_node::get_create (new_call),
+ new_stmt, count, frequency);
gcc_assert (ne->inline_failed);
}
}
/* We only updated the call stmt; update pointer in cgraph edge.. */
else if (old_stmt != new_stmt)
- cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
+ cgraph_set_call_stmt (node->get_edge (old_stmt), new_stmt);
}
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
void
cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
{
- struct cgraph_node *orig = cgraph_get_node (cfun->decl);
+ struct cgraph_node *orig = cgraph_node::get (cfun->decl);
struct cgraph_node *node;
gcc_checking_assert (orig);
/* Remove all callees from the node. */
void
-cgraph_node_remove_callees (struct cgraph_node *node)
+cgraph_node::remove_callees (void)
{
struct cgraph_edge *e, *f;
/* It is sufficient to remove the edges from the lists of callers of
the callees. The callee list of the node can be zapped with one
assignment. */
- for (e = node->callees; e; e = f)
+ for (e = callees; e; e = f)
{
f = e->next_callee;
cgraph_call_edge_removal_hooks (e);
cgraph_edge_remove_callee (e);
cgraph_free_edge (e);
}
- for (e = node->indirect_calls; e; e = f)
+ for (e = indirect_calls; e; e = f)
{
f = e->next_callee;
cgraph_call_edge_removal_hooks (e);
cgraph_edge_remove_callee (e);
cgraph_free_edge (e);
}
- node->indirect_calls = NULL;
- node->callees = NULL;
- if (node->call_site_hash)
+ indirect_calls = NULL;
+ callees = NULL;
+ if (call_site_hash)
{
- htab_delete (node->call_site_hash);
- node->call_site_hash = NULL;
+ htab_delete (call_site_hash);
+ call_site_hash = NULL;
}
}
/* Remove all callers from the node. */
-static void
-cgraph_node_remove_callers (struct cgraph_node *node)
+void
+cgraph_node::remove_callers (void)
{
struct cgraph_edge *e, *f;
/* It is sufficient to remove the edges from the lists of callees of
the callers. The caller list of the node can be zapped with one
assignment. */
- for (e = node->callers; e; e = f)
+ for (e = callers; e; e = f)
{
f = e->next_caller;
cgraph_call_edge_removal_hooks (e);
cgraph_edge_remove_caller (e);
cgraph_free_edge (e);
}
- node->callers = NULL;
+ callers = NULL;
}
/* Helper function for cgraph_release_function_body and free_lang_data.
DECL_SAVED_TREE (decl) = NULL;
}
-/* Release memory used to represent body of function NODE.
+/* Release memory used to represent body of function.
Use this only for functions that are released before being translated to
target code (i.e. RTL). Functions that are compiled to RTL and beyond
are free'd in final.c via free_after_compilation(). */
void
-cgraph_release_function_body (struct cgraph_node *node)
+cgraph_node::release_body (void)
{
- node->ipa_transforms_to_apply.release ();
- if (!node->used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
+ ipa_transforms_to_apply.release ();
+ if (!used_as_abstract_origin && cgraph_state != CGRAPH_STATE_PARSING)
{
- DECL_RESULT (node->decl) = NULL;
- DECL_ARGUMENTS (node->decl) = NULL;
+ DECL_RESULT (decl) = NULL;
+ DECL_ARGUMENTS (decl) = NULL;
}
/* If the node is abstract and needed, then do not clear DECL_INITIAL
of its associated function function declaration because it's
needed to emit debug info later. */
- if (!node->used_as_abstract_origin && DECL_INITIAL (node->decl))
- DECL_INITIAL (node->decl) = error_mark_node;
- release_function_body (node->decl);
- if (node->lto_file_data)
- lto_free_function_in_decl_state_for_node (node);
+ if (!used_as_abstract_origin && DECL_INITIAL (decl))
+ DECL_INITIAL (decl) = error_mark_node;
+ release_function_body (decl);
+ if (lto_file_data)
+ lto_free_function_in_decl_state_for_node (this);
}
-/* Remove the node from cgraph. */
+/* Remove function from symbol table. */
void
-cgraph_remove_node (struct cgraph_node *node)
+cgraph_node::remove (void)
{
struct cgraph_node *n;
- int uid = node->uid;
+ int uid = this->uid;
- cgraph_call_node_removal_hooks (node);
- cgraph_node_remove_callers (node);
- cgraph_node_remove_callees (node);
- node->ipa_transforms_to_apply.release ();
+ cgraph_call_node_removal_hooks (this);
+ remove_callers ();
+ remove_callees ();
+ ipa_transforms_to_apply.release ();
/* Incremental inlining access removed nodes stored in the postorder list.
*/
- node->force_output = false;
- node->forced_by_abi = false;
- for (n = node->nested; n; n = n->next_nested)
+ force_output = false;
+ forced_by_abi = false;
+ for (n = nested; n; n = n->next_nested)
n->origin = NULL;
- node->nested = NULL;
- if (node->origin)
+ nested = NULL;
+ if (origin)
{
- struct cgraph_node **node2 = &node->origin->nested;
+ struct cgraph_node **node2 = &origin->nested;
- while (*node2 != node)
+ while (*node2 != this)
node2 = &(*node2)->next_nested;
- *node2 = node->next_nested;
+ *node2 = next_nested;
}
- symtab_unregister_node (node);
- if (node->prev_sibling_clone)
- node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
- else if (node->clone_of)
- node->clone_of->clones = node->next_sibling_clone;
- if (node->next_sibling_clone)
- node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
- if (node->clones)
+ unregister ();
+ if (prev_sibling_clone)
+ prev_sibling_clone->next_sibling_clone = next_sibling_clone;
+ else if (clone_of)
+ clone_of->clones = next_sibling_clone;
+ if (next_sibling_clone)
+ next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
+ if (clones)
{
struct cgraph_node *n, *next;
- if (node->clone_of)
+ if (clone_of)
{
- for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
- n->clone_of = node->clone_of;
- n->clone_of = node->clone_of;
- n->next_sibling_clone = node->clone_of->clones;
- if (node->clone_of->clones)
- node->clone_of->clones->prev_sibling_clone = n;
- node->clone_of->clones = node->clones;
+ for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
+ n->clone_of = clone_of;
+ n->clone_of = clone_of;
+ n->next_sibling_clone = clone_of->clones;
+ if (clone_of->clones)
+ clone_of->clones->prev_sibling_clone = n;
+ clone_of->clones = clones;
}
else
{
tree intact. This can happen in unreachable function removal since
we remove unreachable functions in random order, not by bottom-up
walk of clone trees. */
- for (n = node->clones; n; n = next)
+ for (n = clones; n; n = next)
{
next = n->next_sibling_clone;
n->next_sibling_clone = NULL;
*/
if (cgraph_state != CGRAPH_LTO_STREAMING)
{
- n = cgraph_get_node (node->decl);
+ n = cgraph_node::get (decl);
 if (!n
 || (!n->clones && !n->clone_of && !n->global.inlined_to
 && (cgraph_global_info_ready
 && (TREE_ASM_WRITTEN (n->decl)
 || DECL_EXTERNAL (n->decl)
 || !n->analyzed
 || (!flag_wpa && n->in_other_partition)))))
- cgraph_release_function_body (node);
+ release_body ();
}
- node->decl = NULL;
- if (node->call_site_hash)
+ decl = NULL;
+ if (call_site_hash)
{
- htab_delete (node->call_site_hash);
- node->call_site_hash = NULL;
+ htab_delete (call_site_hash);
+ call_site_hash = NULL;
}
cgraph_n_nodes--;
/* Clear out the node to NULL all pointers and add the node to the free
list. */
- memset (node, 0, sizeof (*node));
- node->type = SYMTAB_FUNCTION;
- node->uid = uid;
- SET_NEXT_FREE_NODE (node, free_nodes);
- free_nodes = node;
+ memset (this, 0, sizeof (*this));
+ type = SYMTAB_FUNCTION;
+ this->uid = uid;
+ SET_NEXT_FREE_NODE (this, free_nodes);
+ free_nodes = this;
}
/* Likewise indicate that a node is having address taken. */
void
-cgraph_mark_address_taken_node (struct cgraph_node *node)
+cgraph_node::mark_address_taken (void)
{
/* Indirect inlining can figure out that all uses of the address are
inlined. */
- if (node->global.inlined_to)
+ if (global.inlined_to)
{
gcc_assert (cfun->after_inlining);
- gcc_assert (node->callers->indirect_inlining_edge);
+ gcc_assert (callers->indirect_inlining_edge);
return;
}
/* FIXME: address_taken flag is used both as a shortcut for testing whether
of the object was taken (and thus it should be set on node alias is
referring to). We should remove the first use and the remove the
following set. */
- node->address_taken = 1;
- node = cgraph_function_or_thunk_node (node, NULL);
+ address_taken = 1;
+ cgraph_node *node = ultimate_alias_target ();
node->address_taken = 1;
}
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node)
return NULL;
return &node->local;
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node)
return NULL;
return &node->global;
struct cgraph_node *node;
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node
|| (decl != current_function_decl
&& !TREE_ASM_WRITTEN (node->decl)))
{"unset", "not_available", "overwritable", "available", "local"};
-/* Dump call graph node NODE to file F. */
+/* Dump call graph node to file F. */
void
-dump_cgraph_node (FILE *f, struct cgraph_node *node)
+cgraph_node::dump (FILE *f)
{
struct cgraph_edge *edge;
int indirect_calls_count = 0;
- dump_symtab_base (f, node);
+ dump_base (f);
- if (node->global.inlined_to)
+ if (global.inlined_to)
fprintf (f, " Function %s/%i is inline copy in %s/%i\n",
- xstrdup (node->name ()),
- node->order,
- xstrdup (node->global.inlined_to->name ()),
- node->global.inlined_to->order);
- if (node->clone_of)
+ xstrdup (name ()),
+ order,
+ xstrdup (global.inlined_to->name ()),
+ global.inlined_to->order);
+ if (clone_of)
fprintf (f, " Clone of %s/%i\n",
- node->clone_of->asm_name (),
- node->clone_of->order);
+ clone_of->asm_name (),
+ clone_of->order);
if (cgraph_function_flags_ready)
fprintf (f, " Availability: %s\n",
- cgraph_availability_names [cgraph_function_body_availability (node)]);
+ cgraph_availability_names [get_availability ()]);
- if (node->profile_id)
+ if (profile_id)
fprintf (f, " Profile id: %i\n",
- node->profile_id);
- fprintf (f, " First run: %i\n", node->tp_first_run);
+ profile_id);
+ fprintf (f, " First run: %i\n", tp_first_run);
fprintf (f, " Function flags:");
- if (node->count)
+ if (count)
fprintf (f, " executed %"PRId64"x",
- (int64_t)node->count);
- if (node->origin)
- fprintf (f, " nested in: %s", node->origin->asm_name ());
- if (gimple_has_body_p (node->decl))
+ (int64_t)count);
+ if (origin)
+ fprintf (f, " nested in: %s", origin->asm_name ());
+ if (gimple_has_body_p (decl))
fprintf (f, " body");
- if (node->process)
+ if (process)
fprintf (f, " process");
- if (node->local.local)
+ if (local.local)
fprintf (f, " local");
- if (node->local.redefined_extern_inline)
+ if (local.redefined_extern_inline)
fprintf (f, " redefined_extern_inline");
- if (node->only_called_at_startup)
+ if (only_called_at_startup)
fprintf (f, " only_called_at_startup");
- if (node->only_called_at_exit)
+ if (only_called_at_exit)
fprintf (f, " only_called_at_exit");
- if (node->tm_clone)
+ if (tm_clone)
fprintf (f, " tm_clone");
- if (DECL_STATIC_CONSTRUCTOR (node->decl))
- fprintf (f," static_constructor (priority:%i)", node->get_init_priority ());
- if (DECL_STATIC_DESTRUCTOR (node->decl))
- fprintf (f," static_destructor (priority:%i)", node->get_fini_priority ());
+ if (DECL_STATIC_CONSTRUCTOR (decl))
+ fprintf (f," static_constructor (priority:%i)", get_init_priority ());
+ if (DECL_STATIC_DESTRUCTOR (decl))
+ fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
fprintf (f, "\n");
- if (node->thunk.thunk_p)
+ if (thunk.thunk_p)
{
fprintf (f, " Thunk");
- if (node->thunk.alias)
+ if (thunk.alias)
fprintf (f, " of %s (asm: %s)",
- lang_hooks.decl_printable_name (node->thunk.alias, 2),
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
+ lang_hooks.decl_printable_name (thunk.alias, 2),
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
fprintf (f, " fixed offset %i virtual value %i has "
"virtual offset %i)\n",
- (int)node->thunk.fixed_offset,
- (int)node->thunk.virtual_value,
- (int)node->thunk.virtual_offset_p);
+ (int)thunk.fixed_offset,
+ (int)thunk.virtual_value,
+ (int)thunk.virtual_offset_p);
}
- if (node->alias && node->thunk.alias
- && DECL_P (node->thunk.alias))
+ if (alias && thunk.alias
+ && DECL_P (thunk.alias))
{
fprintf (f, " Alias of %s",
- lang_hooks.decl_printable_name (node->thunk.alias, 2));
- if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
+ lang_hooks.decl_printable_name (thunk.alias, 2));
+ if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
fprintf (f, " (asm: %s)",
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
fprintf (f, "\n");
}
fprintf (f, " Called by: ");
- for (edge = node->callers; edge; edge = edge->next_caller)
+ for (edge = callers; edge; edge = edge->next_caller)
{
fprintf (f, "%s/%i ", edge->caller->asm_name (),
edge->caller->order);
}
fprintf (f, "\n Calls: ");
- for (edge = node->callees; edge; edge = edge->next_callee)
+ for (edge = callees; edge; edge = edge->next_callee)
{
fprintf (f, "%s/%i ", edge->callee->asm_name (),
edge->callee->order);
}
fprintf (f, "\n");
- for (edge = node->indirect_calls; edge; edge = edge->next_callee)
+ for (edge = indirect_calls; edge; edge = edge->next_callee)
indirect_calls_count++;
if (indirect_calls_count)
fprintf (f, " Has %i outgoing edges for indirect calls.\n",
indirect_calls_count);
}
-
/* Dump call graph node NODE to stderr. */
DEBUG_FUNCTION void
-debug_cgraph_node (struct cgraph_node *node)
+cgraph_node::debug (void)
{
- dump_cgraph_node (stderr, node);
+ dump (stderr);
}
-
/* Dump the callgraph to file F. */
void
-dump_cgraph (FILE *f)
+cgraph_node::dump_cgraph (FILE *f)
{
struct cgraph_node *node;
fprintf (f, "callgraph:\n\n");
FOR_EACH_FUNCTION (node)
- dump_cgraph_node (f, node);
-}
-
-
-/* Dump the call graph to stderr. */
-
-DEBUG_FUNCTION void
-debug_cgraph (void)
-{
- dump_cgraph (stderr);
+ node->dump (f);
}
/* Return true when the DECL can possibly be inlined. */
+
bool
cgraph_function_possibly_inlined_p (tree decl)
{
return DECL_POSSIBLY_INLINED (decl);
}
-/* NODE is no longer nested function; update cgraph accordingly. */
+/* cgraph_node is no longer nested function; update cgraph accordingly. */
void
-cgraph_unnest_node (struct cgraph_node *node)
+cgraph_node::unnest (void)
{
- struct cgraph_node **node2 = &node->origin->nested;
- gcc_assert (node->origin);
+ struct cgraph_node **node2 = &origin->nested;
+ gcc_assert (origin);
- while (*node2 != node)
+ while (*node2 != this)
node2 = &(*node2)->next_nested;
- *node2 = node->next_nested;
- node->origin = NULL;
+ *node2 = next_nested;
+ origin = NULL;
}
/* Return function availability. See cgraph.h for description of individual
return values. */
enum availability
-cgraph_function_body_availability (struct cgraph_node *node)
+cgraph_node::get_availability (void)
{
enum availability avail;
- if (!node->analyzed)
+ if (!analyzed)
avail = AVAIL_NOT_AVAILABLE;
- else if (node->local.local)
+ else if (local.local)
avail = AVAIL_LOCAL;
- else if (node->alias && node->weakref)
- cgraph_function_or_thunk_node (node, &avail);
- else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (node->decl)))
- avail = AVAIL_OVERWRITABLE;
- else if (!node->externally_visible)
+ else if (alias && weakref)
+ ultimate_alias_target (&avail);
+ else if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
+ avail = AVAIL_INTERPOSABLE;
+ else if (!externally_visible)
avail = AVAIL_AVAILABLE;
/* Inline functions are safe to be analyzed even if their symbol can
be overwritten at runtime. It is not meaningful to enforce any sane
behaviour on replacing inline function by different body. */
- else if (DECL_DECLARED_INLINE_P (node->decl))
+ else if (DECL_DECLARED_INLINE_P (decl))
avail = AVAIL_AVAILABLE;
/* If the function can be overwritten, return OVERWRITABLE. Take
AVAIL_AVAILABLE here? That would be good reason to preserve this
bit. */
- else if (decl_replaceable_p (node->decl)
- && !DECL_EXTERNAL (node->decl))
- avail = AVAIL_OVERWRITABLE;
+ else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
+ avail = AVAIL_INTERPOSABLE;
else avail = AVAIL_AVAILABLE;
return avail;
/* Worker for cgraph_node_can_be_local_p. */
static bool
-cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
- void *data ATTRIBUTE_UNUSED)
+cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node, void *)
{
return !(!node->force_output
&& ((DECL_COMDAT (node->decl)
&& !node->forced_by_abi
- && !symtab_used_from_object_file_p (node)
+ && !node->used_from_object_file_p ()
&& !node->same_comdat_group)
|| !node->externally_visible));
}
-/* Return true if NODE can be made local for API change.
+/* Return true if cgraph_node can be made local for API change.
Extern inline functions and C++ COMDAT functions can be made local
at the expense of possible code size growth if function is used in multiple
compilation units. */
bool
-cgraph_node_can_be_local_p (struct cgraph_node *node)
+cgraph_node::can_be_local_p (void)
{
- return (!node->address_taken
- && !cgraph_for_node_and_aliases (node,
- cgraph_node_cannot_be_local_p_1,
- NULL, true));
+ return (!address_taken
+ && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
+ NULL, true));
}
-/* Call calback on NODE, thunks and aliases associated to NODE.
+/* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
skipped. */
bool
-cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
- bool (*callback) (struct cgraph_node *, void *),
- void *data,
- bool include_overwritable)
+cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
+ (cgraph_node *, void *),
+ void *data,
+ bool include_overwritable)
{
struct cgraph_edge *e;
struct ipa_ref *ref;
- if (callback (node, data))
+ if (callback (this, data))
return true;
- for (e = node->callers; e; e = e->next_caller)
+ for (e = callers; e; e = e->next_caller)
if (e->caller->thunk.thunk_p
&& (include_overwritable
- || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
- if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
- include_overwritable))
+ || e->caller->get_availability () > AVAIL_INTERPOSABLE))
+ if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
+ include_overwritable))
return true;
- FOR_EACH_ALIAS (node, ref)
+ FOR_EACH_ALIAS (this, ref)
{
struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
if (include_overwritable
- || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
- if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
- include_overwritable))
+ || alias->get_availability () > AVAIL_INTERPOSABLE)
+ if (alias->call_for_symbol_thunks_and_aliases (callback, data,
+ include_overwritable))
return true;
}
return false;
}
-/* Call calback on NODE and aliases associated to NODE.
+/* Call callback on function and aliases associated to the function.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
skipped. */
bool
-cgraph_for_node_and_aliases (struct cgraph_node *node,
- bool (*callback) (struct cgraph_node *, void *),
- void *data,
- bool include_overwritable)
+cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
+ void *),
+ void *data,
+ bool include_overwritable)
{
struct ipa_ref *ref;
- if (callback (node, data))
+ if (callback (this, data))
return true;
- FOR_EACH_ALIAS (node, ref)
+ FOR_EACH_ALIAS (this, ref)
{
struct cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
if (include_overwritable
- || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
- if (cgraph_for_node_and_aliases (alias, callback, data,
- include_overwritable))
+ || alias->get_availability () > AVAIL_INTERPOSABLE)
+ if (alias->call_for_symbol_and_aliases (callback, data,
+ include_overwritable))
return true;
}
return false;
/* Worker to bring NODE local. */
-static bool
-cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+bool
+cgraph_node::make_local (struct cgraph_node *node, void *)
{
- gcc_checking_assert (cgraph_node_can_be_local_p (node));
+ gcc_checking_assert (node->can_be_local_p ());
if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
{
- symtab_make_decl_local (node->decl);
-
+ node->make_decl_local ();
node->set_section (NULL);
node->set_comdat_group (NULL);
node->externally_visible = false;
node->unique_name = (node->resolution == LDPR_PREVAILING_DEF_IRONLY
|| node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP);
node->resolution = LDPR_PREVAILING_DEF_IRONLY;
- gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
+ gcc_assert (node->get_availability () == AVAIL_LOCAL);
}
return false;
}
-/* Bring NODE local. */
+/* Bring cgraph node local. */
void
-cgraph_make_node_local (struct cgraph_node *node)
+cgraph_node::make_local (void)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
- NULL, true);
+ call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
}
/* Worker to set nothrow flag. */
if any to NOTHROW. */
void
-cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
+cgraph_node::set_nothrow_flag (bool nothrow)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
- (void *)(size_t)nothrow, false);
+ call_for_symbol_thunks_and_aliases (cgraph_set_nothrow_flag_1,
+ (void *)(size_t)nothrow, false);
}
/* Worker to set const flag. */
return false;
}
-/* Set TREE_READONLY on NODE's decl and on aliases of NODE
+/* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
if any to READONLY. */
void
-cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
+cgraph_node::set_const_flag (bool readonly, bool looping)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
- (void *)(size_t)(readonly + (int)looping * 2),
+ call_for_symbol_thunks_and_aliases (cgraph_set_const_flag_1,
+ (void *)(size_t)(readonly + (int)looping * 2),
false);
}
return false;
}
-/* Set DECL_PURE_P on NODE's decl and on aliases of NODE
+/* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
if any to PURE. */
void
-cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
+cgraph_node::set_pure_flag (bool pure, bool looping)
{
- cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
- (void *)(size_t)(pure + (int)looping * 2),
- false);
+ call_for_symbol_thunks_and_aliases (cgraph_set_pure_flag_1,
+ (void *)(size_t)(pure + (int)looping * 2),
+ false);
}
-/* Return true when NODE can not return or throw and thus
+/* Return true when cgraph_node can not return or throw and thus
it is safe to ignore its side effects for IPA analysis. */
bool
-cgraph_node_cannot_return (struct cgraph_node *node)
+cgraph_node::cannot_return_p (void)
{
- int flags = flags_from_decl_or_type (node->decl);
+ int flags = flags_from_decl_or_type (decl);
if (!flag_exceptions)
return (flags & ECF_NORETURN) != 0;
else
bool
cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
{
- if (cgraph_node_cannot_return (e->caller))
+ if (e->caller->cannot_return_p ())
return true;
if (e->indirect_unknown_callee)
{
== (ECF_NORETURN | ECF_NOTHROW));
}
else
- return cgraph_node_cannot_return (e->callee);
+ return e->callee->cannot_return_p ();
}
-/* Return true when function NODE can be removed from callgraph
+/* Return true when function can be removed from callgraph
if all direct calls are eliminated. */
bool
-cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
+cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
{
- gcc_assert (!node->global.inlined_to);
+ gcc_assert (!global.inlined_to);
/* Extern inlines can always go, we will use the external definition. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return true;
/* When function is needed, we can not remove it. */
- if (node->force_output || node->used_from_other_partition)
+ if (force_output || used_from_other_partition)
return false;
- if (DECL_STATIC_CONSTRUCTOR (node->decl)
- || DECL_STATIC_DESTRUCTOR (node->decl))
+ if (DECL_STATIC_CONSTRUCTOR (decl)
+ || DECL_STATIC_DESTRUCTOR (decl))
return false;
/* Only COMDAT functions can be removed if externally visible. */
- if (node->externally_visible
- && (!DECL_COMDAT (node->decl)
- || node->forced_by_abi
- || symtab_used_from_object_file_p (node)))
+ if (externally_visible
+ && (!DECL_COMDAT (decl)
+ || forced_by_abi
+ || used_from_object_file_p ()))
return false;
return true;
}
/* Worker for cgraph_can_remove_if_no_direct_calls_p. */
static bool
-nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+nonremovable_p (struct cgraph_node *node, void *)
{
- return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
+ return !node->can_remove_if_no_direct_calls_and_refs_p ();
}
-/* Return true when function NODE and its aliases can be removed from callgraph
- if all direct calls are eliminated. */
+/* Return true when function cgraph_node and its aliases can be removed from
+ callgraph if all direct calls are eliminated. */
bool
-cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
+cgraph_node::can_remove_if_no_direct_calls_p (void)
{
/* Extern inlines can always go, we will use the external definition. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return true;
- if (node->address_taken)
+ if (address_taken)
return false;
- return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
+ return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
}
-/* Worker for cgraph_can_remove_if_no_direct_calls_p. */
-
-static bool
-used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
-{
- return symtab_used_from_object_file_p (node);
-}
-
-/* Return true when function NODE can be expected to be removed
+/* Return true when function cgraph_node can be expected to be removed
from program when direct calls in this compilation unit are removed.
As a special case COMDAT functions are
linkonce section. */
bool
-cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
+cgraph_node::will_be_removed_from_program_if_no_direct_calls_p (void)
{
- gcc_assert (!node->global.inlined_to);
- if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
+ gcc_assert (!global.inlined_to);
+
+ if (call_for_symbol_and_aliases (used_from_object_file_p_worker,
+ NULL, true))
return false;
if (!in_lto_p && !flag_whole_program)
- return cgraph_only_called_directly_p (node);
+ return only_called_directly_p ();
else
{
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return true;
- return cgraph_can_remove_if_no_direct_calls_p (node);
+ return can_remove_if_no_direct_calls_p ();
}
}
/* Worker for cgraph_only_called_directly_p. */
static bool
-cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *)
{
- return !cgraph_only_called_directly_or_aliased_p (node);
+ return !node->only_called_directly_or_aliased_p ();
}
-/* Return true when function NODE and all its aliases are only called
+/* Return true when function cgraph_node and all its aliases are only called
directly.
i.e. it is not externally visible, address was not taken and
it is not used in any other non-standard way. */
bool
-cgraph_only_called_directly_p (struct cgraph_node *node)
+cgraph_node::only_called_directly_p (void)
{
- gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
- return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
+ gcc_assert (ultimate_alias_target () == this);
+ return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
NULL, true);
}
static bool
collect_callers_of_node_1 (struct cgraph_node *node, void *data)
{
- vec<cgraph_edge_p> *redirect_callers = (vec<cgraph_edge_p> *)data;
+ vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
struct cgraph_edge *cs;
enum availability avail;
- cgraph_function_or_thunk_node (node, &avail);
+ node->ultimate_alias_target (&avail);
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
for (cs = node->callers; cs != NULL; cs = cs->next_caller)
if (!cs->indirect_inlining_edge)
redirect_callers->safe_push (cs);
return false;
}
-/* Collect all callers of NODE and its aliases that are known to lead to NODE
- (i.e. are not overwritable). */
+/* Collect all callers of cgraph_node and its aliases that are known to lead to
+ cgraph_node (i.e. are not overwritable). */
-vec<cgraph_edge_p>
-collect_callers_of_node (struct cgraph_node *node)
+vec<cgraph_edge *>
+cgraph_node::collect_callers (void)
{
- vec<cgraph_edge_p> redirect_callers = vNULL;
- cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
- &redirect_callers, false);
+ vec<cgraph_edge *> redirect_callers = vNULL;
+ call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
+ &redirect_callers, false);
return redirect_callers;
}
clone_of_p (struct cgraph_node *node, struct cgraph_node *node2)
{
bool skipped_thunk = false;
- node = cgraph_function_or_thunk_node (node, NULL);
- node2 = cgraph_function_or_thunk_node (node2, NULL);
+ node = node->ultimate_alias_target ();
+ node2 = node2->ultimate_alias_target ();
/* There are no virtual clones of thunks so check former_clone_of or if we
might have skipped thunks because this adjustments are no longer
return true;
if (!node->thunk.this_adjusting)
return false;
- node = cgraph_function_or_thunk_node (node->callees->callee, NULL);
+ node = node->callees->callee->ultimate_alias_target ();
skipped_thunk = true;
}
return false;
if (cgraph_state == CGRAPH_LTO_STREAMING)
return false;
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
/* We do not know if a node from a different partition is an alias or what it
aliases and therefore cannot do the former_clone_of check reliably. When
|| e->callee->in_other_partition)
return false;
+ node = node->ultimate_alias_target ();
+
/* Optimizers can redirect unreachable calls or calls triggering undefined
behaviour to builtin_unreachable. */
if (DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
return false;
- node = cgraph_function_or_thunk_node (node, NULL);
if (e->callee->former_clone_of != node->decl
- && (node != cgraph_function_or_thunk_node (e->callee, NULL))
+ && (node != e->callee->ultimate_alias_target ())
&& !clone_of_p (node, e->callee))
return true;
else
/* Verify cgraph nodes of given cgraph node. */
DEBUG_FUNCTION void
-verify_cgraph_node (struct cgraph_node *node)
+cgraph_node::verify_node (void)
{
struct cgraph_edge *e;
- struct function *this_cfun = DECL_STRUCT_FUNCTION (node->decl);
+ struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
basic_block this_block;
gimple_stmt_iterator gsi;
bool error_found = false;
return;
timevar_push (TV_CGRAPH_VERIFY);
- error_found |= verify_symtab_base (node);
- for (e = node->callees; e; e = e->next_callee)
+ error_found |= verify_base ();
+ for (e = callees; e; e = e->next_callee)
if (e->aux)
{
error ("aux field set for edge %s->%s",
identifier_to_locale (e->callee->name ()));
error_found = true;
}
- if (node->count < 0)
+ if (count < 0)
{
error ("execution count is negative");
error_found = true;
}
- if (node->global.inlined_to && node->same_comdat_group)
+ if (global.inlined_to && same_comdat_group)
{
error ("inline clone in same comdat group list");
error_found = true;
}
- if (!node->definition && !node->in_other_partition && node->local.local)
+ if (!definition && !in_other_partition && local.local)
{
error ("local symbols must be defined");
error_found = true;
}
- if (node->global.inlined_to && node->externally_visible)
+ if (global.inlined_to && externally_visible)
{
error ("externally visible inline clone");
error_found = true;
}
- if (node->global.inlined_to && node->address_taken)
+ if (global.inlined_to && address_taken)
{
error ("inline clone with address taken");
error_found = true;
}
- if (node->global.inlined_to && node->force_output)
+ if (global.inlined_to && force_output)
{
error ("inline clone is forced to output");
error_found = true;
}
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
{
if (e->aux)
{
error_found = true;
}
}
- bool check_comdat = symtab_comdat_local_p (node);
- for (e = node->callers; e; e = e->next_caller)
+ bool check_comdat = comdat_local_p ();
+ for (e = callers; e; e = e->next_caller)
{
if (verify_edge_count_and_frequency (e))
error_found = true;
if (check_comdat
- && !symtab_in_same_comdat_p (e->caller, node))
+ && !in_same_comdat_group_p (e->caller))
{
error ("comdat-local function called by %s outside its comdat",
identifier_to_locale (e->caller->name ()));
}
if (!e->inline_failed)
{
- if (node->global.inlined_to
+ if (global.inlined_to
!= (e->caller->global.inlined_to
? e->caller->global.inlined_to : e->caller))
{
error ("inlined_to pointer is wrong");
error_found = true;
}
- if (node->callers->next_caller)
+ if (callers->next_caller)
{
error ("multiple inline callers");
error_found = true;
}
}
else
- if (node->global.inlined_to)
+ if (global.inlined_to)
{
error ("inlined_to pointer set for noninline callers");
error_found = true;
}
}
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
if (verify_edge_count_and_frequency (e))
error_found = true;
- if (!node->callers && node->global.inlined_to)
+ if (!callers && global.inlined_to)
{
error ("inlined_to pointer is set but no predecessors found");
error_found = true;
}
- if (node->global.inlined_to == node)
+ if (global.inlined_to == this)
{
error ("inlined_to pointer refers to itself");
error_found = true;
}
- if (node->clone_of)
+ if (clone_of)
{
struct cgraph_node *n;
- for (n = node->clone_of->clones; n; n = n->next_sibling_clone)
- if (n == node)
+ for (n = clone_of->clones; n; n = n->next_sibling_clone)
+ if (n == this)
break;
if (!n)
{
- error ("node has wrong clone_of");
+ error ("cgraph_node has wrong clone_of");
error_found = true;
}
}
- if (node->clones)
+ if (clones)
{
struct cgraph_node *n;
- for (n = node->clones; n; n = n->next_sibling_clone)
- if (n->clone_of != node)
+ for (n = clones; n; n = n->next_sibling_clone)
+ if (n->clone_of != this)
break;
if (n)
{
- error ("node has wrong clone list");
+ error ("cgraph_node has wrong clone list");
error_found = true;
}
}
- if ((node->prev_sibling_clone || node->next_sibling_clone) && !node->clone_of)
+ if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
{
- error ("node is in clone list but it is not clone");
+ error ("cgraph_node is in clone list but it is not clone");
error_found = true;
}
- if (!node->prev_sibling_clone && node->clone_of && node->clone_of->clones != node)
+ if (!prev_sibling_clone && clone_of && clone_of->clones != this)
{
- error ("node has wrong prev_clone pointer");
+ error ("cgraph_node has wrong prev_clone pointer");
error_found = true;
}
- if (node->prev_sibling_clone && node->prev_sibling_clone->next_sibling_clone != node)
+ if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
{
error ("double linked list of clones corrupted");
error_found = true;
}
- if (node->analyzed && node->alias)
+ if (analyzed && alias)
{
bool ref_found = false;
int i;
struct ipa_ref *ref = NULL;
- if (node->callees)
+ if (callees)
{
error ("Alias has call edges");
error_found = true;
}
- for (i = 0; node->iterate_reference (i, ref); i++)
+ for (i = 0; iterate_reference (i, ref); i++)
if (ref->use != IPA_REF_ALIAS)
{
error ("Alias has non-alias reference");
error_found = true;
}
}
- if (node->analyzed && node->thunk.thunk_p)
+ if (analyzed && thunk.thunk_p)
{
- if (!node->callees)
+ if (!callees)
{
error ("No edge out of thunk node");
error_found = true;
}
- else if (node->callees->next_callee)
+ else if (callees->next_callee)
{
error ("More than one edge out of thunk node");
error_found = true;
}
- if (gimple_has_body_p (node->decl))
+ if (gimple_has_body_p (decl))
{
error ("Thunk is not supposed to have body");
error_found = true;
}
}
- else if (node->analyzed && gimple_has_body_p (node->decl)
- && !TREE_ASM_WRITTEN (node->decl)
- && (!DECL_EXTERNAL (node->decl) || node->global.inlined_to)
- && !flag_wpa)
+ else if (analyzed && gimple_has_body_p (decl)
+ && !TREE_ASM_WRITTEN (decl)
+ && (!DECL_EXTERNAL (decl) || global.inlined_to)
+ && !flag_wpa)
{
if (this_cfun->cfg)
{
pointer_set_insert (stmts, stmt);
if (is_gimple_call (stmt))
{
- struct cgraph_edge *e = cgraph_edge (node, stmt);
+ struct cgraph_edge *e = get_edge (stmt);
tree decl = gimple_call_fndecl (stmt);
if (e)
{
}
}
}
- for (i = 0;
- node->iterate_reference (i, ref); i++)
+ for (i = 0; iterate_reference (i, ref); i++)
if (ref->stmt && !pointer_set_contains (stmts, ref->stmt))
{
error ("reference to dead statement");
/* No CFG available?! */
gcc_unreachable ();
- for (e = node->callees; e; e = e->next_callee)
+ for (e = callees; e; e = e->next_callee)
{
if (!e->aux)
{
}
e->aux = 0;
}
- for (e = node->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
{
if (!e->aux && !e->speculative)
{
}
if (error_found)
{
- dump_cgraph_node (stderr, node);
+ dump (stderr);
internal_error ("verify_cgraph_node failed");
}
timevar_pop (TV_CGRAPH_VERIFY);
/* Verify whole cgraph structure. */
DEBUG_FUNCTION void
-verify_cgraph (void)
+cgraph_node::verify_cgraph_nodes (void)
{
struct cgraph_node *node;
return;
FOR_EACH_FUNCTION (node)
- verify_cgraph_node (node);
+ node->verify ();
}
-/* Given NODE, walk the alias chain to return the function NODE is alias of.
+/* Walk the alias chain to return the function cgraph_node is alias of.
Walk through thunk, too.
When AVAILABILITY is non-NULL, get minimal availability in the chain. */
-struct cgraph_node *
-cgraph_function_node (struct cgraph_node *node, enum availability *availability)
+cgraph_node *
+cgraph_node::function_symbol (enum availability *availability)
{
+ cgraph_node *node = NULL;
+
do
{
- node = cgraph_function_or_thunk_node (node, availability);
+ node = ultimate_alias_target (availability);
if (node->thunk.thunk_p)
{
node = node->callees->callee;
if (availability)
{
enum availability a;
- a = cgraph_function_body_availability (node);
+ a = node->get_availability ();
if (a < *availability)
*availability = a;
}
- node = cgraph_function_or_thunk_node (node, availability);
+ node = node->ultimate_alias_target (availability);
}
} while (node && node->thunk.thunk_p);
return node;
}
-/* When doing LTO, read NODE's body from disk if it is not already present. */
+/* When doing LTO, read cgraph_node's body from disk if it is not already
+ present. */
bool
-cgraph_get_body (struct cgraph_node *node)
+cgraph_node::get_body (void)
{
struct lto_file_decl_data *file_data;
const char *data, *name;
size_t len;
- tree decl = node->decl;
+ tree decl = this->decl;
if (DECL_RESULT (decl))
return false;
timevar_push (TV_IPA_LTO_GIMPLE_IN);
- file_data = node->lto_file_data;
+ file_data = lto_file_data;
name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
/* We may have renamed the declaration, e.g., a static function. */
name, &len);
if (!data)
{
- dump_cgraph_node (stderr, node);
+ debug ();
fatal_error ("%s: section %s is missing",
file_data->file_name,
name);
gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
- lto_input_function_body (file_data, node, data);
+ lto_input_function_body (file_data, this, data);
lto_stats.num_function_bodies++;
lto_free_section_data (file_data, LTO_section_function_body, name,
data, len);
- lto_free_function_in_decl_state_for_node (node);
+ lto_free_function_in_decl_state_for_node (this);
timevar_pop (TV_IPA_LTO_GIMPLE_IN);
typedef struct section_hash_entry_d section_hash_entry;
+enum availability
+{
+ /* Not yet set by cgraph_function_body_availability. */
+ AVAIL_UNSET,
+ /* Function body/variable initializer is unknown. */
+ AVAIL_NOT_AVAILABLE,
+ /* Function body/variable initializer is known but might be replaced
+ by a different one from other compilation unit and thus needs to
+ be dealt with care. Like AVAIL_NOT_AVAILABLE it can have
+ arbitrary side effects on escaping variables and functions, while
+ like AVAILABLE it might access static variables. */
+ AVAIL_INTERPOSABLE,
+ /* Function body/variable initializer is known and will be used in final
+ program. */
+ AVAIL_AVAILABLE,
+ /* Function body/variable initializer is known and all its uses are
+ explicitly visible within current unit (ie its address is never taken and
+ it is not exported to other units). Currently used only for functions. */
+ AVAIL_LOCAL
+};
+
+/* Classification of symbols WRT partitioning. */
+enum symbol_partitioning_class
+{
+ /* External declarations are ignored by partitioning algorithms and they are
+ added into the boundary later via compute_ltrans_boundary. */
+ SYMBOL_EXTERNAL,
+ /* Partitioned symbols are put into one of the partitions. */
+ SYMBOL_PARTITION,
+ /* Duplicated symbols (such as comdat or constant pool references) are
+ copied into every node needing them via add_symbol_to_partition. */
+ SYMBOL_DUPLICATE
+};
+
/* Base of all entries in the symbol table.
The symtab_node is inherited by cgraph and varpol nodes. */
class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
/* Return asm name. */
const char * asm_name () const;
+ /* Add node into symbol table. This function is not used directly, but via
+ cgraph/varpool node creation routines. */
+ void register_symbol (void);
+
+ /* Remove symbol from symbol table. */
+ void remove (void);
+
+ /* Dump symtab node to F. */
+ void dump (FILE *f);
+
+ /* Dump symtab node to stderr. */
+ void DEBUG_FUNCTION debug (void);
+
+ /* Verify consistency of node. */
+ void DEBUG_FUNCTION verify (void);
+
+ /* Return ipa reference from this symtab_node to
+ REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
+ of the use and STMT the statement (if it exists). */
+ struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type);
+
+ /* Return ipa reference from this symtab_node to
+ REFERRED_NODE or REFERRED_VARPOOL_NODE. USE_TYPE specifies the type
+ of the use and STMT the statement (if it exists). */
+ struct ipa_ref *add_reference (symtab_node *referred_node,
+ enum ipa_ref_use use_type, gimple stmt);
+
+ /* If VAL is a reference to a function or a variable, add a reference from
+ this symtab_node to the corresponding symbol table node. USE_TYPE specifies
+ the type of the use and STMT the statement (if it exists). Return the new
+ reference or NULL if none was created. */
+ struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
+ gimple stmt);
+
+ /* Clone all references from symtab NODE to this symtab_node. */
+ void clone_references (symtab_node *node);
+
+ /* Clone all referring items from symtab NODE to this symtab_node. */
+ void clone_referring (symtab_node *node);
+
+ /* Clone reference REF to this symtab_node and set its stmt to STMT. */
+ struct ipa_ref *clone_reference (struct ipa_ref *ref, gimple stmt);
+
+ /* Find the structure describing a reference to REFERRED_NODE
+ and associated with statement STMT. */
+ struct ipa_ref *find_reference (symtab_node *referred_node, gimple stmt,
+ unsigned int lto_stmt_uid);
+
+ /* Remove all references that are associated with statement STMT. */
+ void remove_stmt_references (gimple stmt);
+
+ /* Remove all stmt references in non-speculative references.
+ Those are not maintained during inlining & cloning.
+ The exception are speculative references that are updated along
+ with callgraph edges associated with them. */
+ void clear_stmts_in_references (void);
+
+ /* Remove all references in ref list. */
+ void remove_all_references (void);
+
+ /* Remove all referring items in ref list. */
+ void remove_all_referring (void);
+
+ /* Dump references in ref list to FILE. */
+ void dump_references (FILE *file);
+
+ /* Dump referring in list to FILE. */
+ void dump_referring (FILE *);
+
+ /* Iterates I-th reference in the list, REF is also set. */
+ struct ipa_ref *iterate_reference (unsigned i, struct ipa_ref *&ref);
+
+ /* Iterates I-th referring item in the list, REF is also set. */
+ struct ipa_ref *iterate_referring (unsigned i, struct ipa_ref *&ref);
+
+ /* Iterates I-th referring alias item in the list, REF is also set. */
+ struct ipa_ref *iterate_direct_aliases (unsigned i, struct ipa_ref *&ref);
+
+ /* Return true if symtab node and TARGET represents
+ semantically equivalent symbols. */
+ bool semantically_equivalent_p (symtab_node *target);
+
+ /* Classify symbol symtab node for partitioning. */
+ enum symbol_partitioning_class get_partitioning_class (void);
+
+ /* Return comdat group. */
+ tree get_comdat_group ()
+ {
+ return x_comdat_group;
+ }
+
+ /* Return comdat group as identifier_node. */
+ tree get_comdat_group_id ()
+ {
+ if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
+ x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
+ return x_comdat_group;
+ }
+
+ /* Set comdat group. */
+ void set_comdat_group (tree group)
+ {
+ gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
+ || DECL_P (group));
+ x_comdat_group = group;
+ }
+
+ /* Return section as string. */
+ const char * get_section ()
+ {
+ if (!x_section)
+ return NULL;
+ return x_section->name;
+ }
+
+ /* Remove node from same comdat group. */
+ void remove_from_same_comdat_group (void);
+
+ /* Add this symtab_node to the same comdat group that OLD is in. */
+ void add_to_same_comdat_group (symtab_node *old_node);
+
+ /* Dissolve the same_comdat_group list in which NODE resides. */
+ void dissolve_same_comdat_group_list (void);
+
+ /* Return true when symtab_node is known to be used from other (non-LTO)
+ object file. Known only when doing LTO via linker plugin. */
+ bool used_from_object_file_p (void);
+
+ /* Walk the alias chain to return the symbol NODE is alias of.
+ If NODE is not an alias, return NODE.
+ When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ symtab_node *ultimate_alias_target (enum availability *avail = NULL);
+
+ /* Return next reachable static symbol with initializer after NODE. */
+ inline symtab_node *next_defined_symbol (void);
+
+ /* Add reference recording that symtab node is alias of TARGET.
+ The function can fail in the case of aliasing cycles; in this case
+ it returns false. */
+ bool resolve_alias (symtab_node *target);
+
+ /* C++ FE sometimes change linkage flags after producing same
+ body aliases. */
+ void fixup_same_cpp_alias_visibility (symtab_node *target);
+
+ /* Call callback on symtab node and aliases associated with this node.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+ bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
+ void *data,
+ bool include_overwrite);
+
+ /* If node can not be interposable by static or dynamic linker to point to
+ different definition, return this symbol. Otherwise look for alias with
+ such property and if none exists, introduce new one. */
+ symtab_node *noninterposable_alias (void);
+
+ /* Return node that alias is aliasing. */
+ inline symtab_node *get_alias_target (void);
+
+ /* Set section for symbol and its aliases. */
+ void set_section (const char *section);
+
+ /* Set section, do not recurse into aliases.
+ When one wants to change section of symbol and its aliases,
+ use set_section. */
+ void set_section_for_node (const char *section);
+
+ /* Set initialization priority to PRIORITY. */
+ void set_init_priority (priority_type priority);
+
+ /* Return the initialization priority. */
+ priority_type get_init_priority ();
+
+ /* Return availability of NODE. */
+ enum availability get_availability (void);
+
+ /* Make DECL local. */
+ void make_decl_local (void);
+
+ /* Return true if list contains an alias. */
+ bool has_aliases_p (void);
+
+ /* Return true when the symbol is real symbol, i.e. it is not inline clone
+ or abstract function kept for debug info purposes only. */
+ bool real_symbol_p (void);
+
+ /* Return true if NODE can be discarded by linker from the binary. */
+ inline bool
+ can_be_discarded_p (void)
+ {
+ return (DECL_EXTERNAL (decl)
+ || (get_comdat_group ()
+ && resolution != LDPR_PREVAILING_DEF
+ && resolution != LDPR_PREVAILING_DEF_IRONLY
+ && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
+ }
+
+ /* Return true if NODE is local to a particular COMDAT group, and must not
+ be named from outside the COMDAT. This is used for C++ decloned
+ constructors. */
+ inline bool comdat_local_p (void)
+ {
+ return (same_comdat_group && !TREE_PUBLIC (decl));
+ }
+
+ /* Return true if ONE and TWO are part of the same COMDAT group. */
+ inline bool in_same_comdat_group_p (symtab_node *target);
+
+ /* Return true when there is a reference to node and it is not vtable. */
+ bool address_taken_from_non_vtable_p (void);
+
+ /* Return true if symbol is known to be nonzero. */
+ bool nonzero_address ();
+
+ /* Return symbol table node associated with DECL, if any,
+ and NULL otherwise. */
+ static inline symtab_node *get (const_tree decl)
+ {
+#ifdef ENABLE_CHECKING
+ /* Check that we are called for sane type of object - functions
+ and static or external variables. */
+ gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
+ || (TREE_CODE (decl) == VAR_DECL
+ && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
+ || in_lto_p)));
+ /* Check that the mapping is sane - perhaps this check can go away,
+ but at the moment frontends tends to corrupt the mapping by calling
+ memcpy/memset on the tree nodes. */
+ gcc_checking_assert (!decl->decl_with_vis.symtab_node
+ || decl->decl_with_vis.symtab_node->decl == decl);
+#endif
+ return decl->decl_with_vis.symtab_node;
+ }
+
+ /* Dump symbol table to F. */
+ static void dump_table (FILE *);
+
+ /* Dump symbol table to stderr. */
+ static inline DEBUG_FUNCTION void debug_symtab (void)
+ {
+ dump_table (stderr);
+ }
+
+ /* Verify symbol table for internal consistency. */
+ static DEBUG_FUNCTION void verify_symtab_nodes (void);
+
+ /* Return true when NODE is known to be used from other (non-LTO)
+ object file. Known only when doing LTO via linker plugin. */
+ static bool used_from_object_file_p_worker (symtab_node *node);
+
/* Type of the symbol. */
ENUM_BITFIELD (symtab_type) type : 8;
symtab_node *next_sharing_asm_name;
symtab_node *previous_sharing_asm_name;
- /* Circular list of nodes in the same comdat group if non-NULL. */
- symtab_node *same_comdat_group;
-
- /* Return comdat group. */
- tree get_comdat_group ()
- {
- return x_comdat_group;
- }
-
- /* Return comdat group as identifier_node. */
- tree get_comdat_group_id ()
- {
- if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
- x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
- return x_comdat_group;
- }
-
- /* Set comdat group. */
- void set_comdat_group (tree group)
- {
- gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
- || DECL_P (group));
- x_comdat_group = group;
- }
-
- /* Return section as string. */
- const char * get_section ()
- {
- if (!x_section)
- return NULL;
- return x_section->name;
- }
-
- /* Return ipa reference from this symtab_node to
- REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
- of the use and STMT the statement (if it exists). */
- struct ipa_ref *add_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type);
-
- /* Return ipa reference from this symtab_node to
- REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
- of the use and STMT the statement (if it exists). */
- struct ipa_ref *add_reference (symtab_node *referred_node,
- enum ipa_ref_use use_type, gimple stmt);
-
- /* If VAL is a reference to a function or a variable, add a reference from
- this symtab_node to the corresponding symbol table node. USE_TYPE specify
- type of the use and STMT the statement (if it exists). Return the new
- reference or NULL if none was created. */
- struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
- gimple stmt);
-
- /* Clone all references from symtab NODE to this symtab_node. */
- void clone_references (symtab_node *node);
-
- /* Remove all stmt references in non-speculative references.
- Those are not maintained during inlining & clonning.
- The exception are speculative references that are updated along
- with callgraph edges associated with them. */
- void clone_referring (symtab_node *node);
-
- /* Clone reference REF to this symtab_node and set its stmt to STMT. */
- struct ipa_ref *clone_reference (struct ipa_ref *ref, gimple stmt);
-
- /* Find the structure describing a reference to REFERRED_NODE
- and associated with statement STMT. */
- struct ipa_ref *find_reference (symtab_node *, gimple, unsigned int);
-
- /* Remove all references that are associated with statement STMT. */
- void remove_stmt_references (gimple stmt);
-
- /* Remove all stmt references in non-speculative references.
- Those are not maintained during inlining & clonning.
- The exception are speculative references that are updated along
- with callgraph edges associated with them. */
- void clear_stmts_in_references (void);
-
- /* Remove all references in ref list. */
- void remove_all_references (void);
-
- /* Remove all referring items in ref list. */
- void remove_all_referring (void);
-
- /* Dump references in ref list to FILE. */
- void dump_references (FILE *file);
-
- /* Dump referring in list to FILE. */
- void dump_referring (FILE *);
-
- /* Return true if list contains an alias. */
- bool has_aliases_p (void);
-
- /* Iterates I-th reference in the list, REF is also set. */
- struct ipa_ref *iterate_reference (unsigned i, struct ipa_ref *&ref);
-
- /* Iterates I-th referring item in the list, REF is also set. */
- struct ipa_ref *iterate_referring (unsigned i, struct ipa_ref *&ref);
-
- /* Iterates I-th referring alias item in the list, REF is also set. */
- struct ipa_ref *iterate_direct_aliases (unsigned i, struct ipa_ref *&ref);
+ /* Circular list of nodes in the same comdat group if non-NULL. */
+ symtab_node *same_comdat_group;
/* Vectors of referring and referenced entities. */
struct ipa_ref_list ref_list;
/* Section name. Again can be private, if allowed. */
section_hash_entry *x_section;
- /* Set section for symbol and its aliases. */
- void set_section (const char *section);
- void set_section_for_node (const char *section);
+protected:
+ /* Dump base fields of symtab nodes to F. Not to be used directly. */
+ void dump_base (FILE *);
- void set_init_priority (priority_type priority);
- priority_type get_init_priority ();
+ /* Verify common part of symtab node. */
+ bool DEBUG_FUNCTION verify_base (void);
- /* Return true if symbol is known to be nonzero. */
- bool nonzero_address ();
+ /* Remove node from symbol table. This function is not used directly, but via
+ cgraph/varpool node removal routines. */
+ void unregister (void);
+
+ /* Return the initialization and finalization priority information for
+ DECL. If there is no previous priority information, a freshly
+ allocated structure is returned. */
+ struct symbol_priority_map *priority_info (void);
+
+private:
+ /* Worker for set_section. */
+ static bool set_section (symtab_node *n, void *s);
+
+ /* Worker for symtab_resolve_alias. */
+ static bool set_implicit_section (symtab_node *n, void *);
+
+ /* Worker searching noninterposable alias. */
+ static bool noninterposable_alias (symtab_node *node, void *data);
};
/* Walk all aliases for NODE. */
#define FOR_EACH_ALIAS(node, alias) \
- for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
-
-enum availability
-{
- /* Not yet set by cgraph_function_body_availability. */
- AVAIL_UNSET,
- /* Function body/variable initializer is unknown. */
- AVAIL_NOT_AVAILABLE,
- /* Function body/variable initializer is known but might be replaced
- by a different one from other compilation unit and thus needs to
- be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have
- arbitrary side effects on escaping variables and functions, while
- like AVAILABLE it might access static variables. */
- AVAIL_OVERWRITABLE,
- /* Function body/variable initializer is known and will be used in final
- program. */
- AVAIL_AVAILABLE,
- /* Function body/variable initializer is known and all it's uses are explicitly
- visible within current unit (ie it's address is never taken and it is not
- exported to other units).
- Currently used only for functions. */
- AVAIL_LOCAL
-};
+ for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
/* This is the information that is put into the cgraph local structure
to recover a function. */
struct GTY(()) cgraph_global_info {
/* For inline clones this points to the function they will be
inlined into. */
- struct cgraph_node *inlined_to;
+ cgraph_node *inlined_to;
};
/* Information about the function that is propagated by the RTL backend.
/* True when we replace a reference to old_tree. */
bool ref_p;
};
-typedef struct ipa_replace_map *ipa_replace_map_p;
struct GTY(()) cgraph_clone_info
{
- vec<ipa_replace_map_p, va_gc> *tree_map;
+ vec<ipa_replace_map *, va_gc> *tree_map;
bitmap args_to_skip;
bitmap combined_args_to_skip;
};
unsigned int cilk_elemental : 1;
/* Doubly linked list of SIMD clones. */
- struct cgraph_node *prev_clone, *next_clone;
+ cgraph_node *prev_clone, *next_clone;
/* Original cgraph node the SIMD clones were created for. */
- struct cgraph_node *origin;
+ cgraph_node *origin;
/* Annotated function arguments for the original function. */
struct cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
};
+/* Function Multiversioning info. */
+struct GTY(()) cgraph_function_version_info {
+ /* The cgraph_node for which the function version info is stored. */
+ cgraph_node *this_node;
+ /* Chains all the semantically identical function versions. The
+ first function in this chain is the version_info node of the
+ default function. */
+ struct cgraph_function_version_info *prev;
+ /* If this version node corresponds to a dispatcher for function
+ versions, this points to the version info node of the default
+ function, the first node in the chain. */
+ struct cgraph_function_version_info *next;
+ /* If this node corresponds to a function version, this points
+ to the dispatcher function decl, which is the function that must
+ be called to execute the right function version at run-time.
+
+ If this cgraph node is a dispatcher (if dispatcher_function is
+ true, in the cgraph_node struct) for function versions, this
+ points to resolver function, which holds the function body of the
+ dispatcher. The dispatcher decl is an alias to the resolver
+ function decl. */
+ tree dispatcher_resolver;
+};
+
+#define DEFCIFCODE(code, type, string) CIF_ ## code,
+/* Reasons for inlining failures. */
+
+enum cgraph_inline_failed_t {
+#include "cif-code.def"
+ CIF_N_REASONS
+};
+
+enum cgraph_inline_failed_type_t
+{
+ CIF_FINAL_NORMAL = 0,
+ CIF_FINAL_ERROR
+};
+
+struct cgraph_edge;
/* The cgraph data structure.
Each function decl has assigned cgraph_node listing callees and callers. */
struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
public:
+ /* Remove the node from cgraph and all inline clones inlined into it.
+ Skip however removal of FORBIDDEN_NODE and return true if it needs to be
+ removed. This allows to call the function from outer loop walking clone
+ tree. */
+ bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
+
+ /* Record all references from cgraph_node that are taken
+ in statement STMT. */
+ void record_stmt_references (gimple stmt);
+
+ /* Like cgraph_set_call_stmt but walk the clone tree and update all
+ clones sharing the same function body.
+ When WHOLE_SPECULATIVE_EDGES is true, all three components of
+ speculative edge gets updated. Otherwise we update only direct
+ call. */
+ void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+ bool update_speculative = true);
+
+ /* Walk the alias chain to return the function cgraph_node is alias of.
+ Walk through thunk, too.
+ When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+ cgraph_node *function_symbol (enum availability *avail = NULL);
+
+ /* Create node representing clone of N executed COUNT times. Decrease
+ the execution counts from original node too.
+ The new clone will have decl set to DECL that may or may not be the same
+ as decl of N.
+
+ When UPDATE_ORIGINAL is true, the counts are subtracted from the original
+ function's profile to reflect the fact that part of execution is handled
+ by node.
+ When CALL_DUPLICATION_HOOK is true, the ipa passes are notified about
+ the new clone. Otherwise the caller is responsible for doing so later.
+
+ If the new node is being inlined into another one, NEW_INLINED_TO should be
+ the outline function the new one is (even indirectly) inlined to.
+ All hooks will see this in node's global.inlined_to, when invoked.
+ Can be NULL if the node is not inlined. */
+ cgraph_node *create_clone (tree decl, gcov_type count, int freq,
+ bool update_original,
+ vec<cgraph_edge *> redirect_callers,
+ bool call_duplication_hook,
+ struct cgraph_node *new_inlined_to,
+ bitmap args_to_skip);
+
+ /* Create callgraph node clone with new declaration. The actual body will
+ be copied later at compilation stage. */
+ cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map,
+ bitmap args_to_skip, const char * suffix);
+
+ /* cgraph node being removed from symbol table; see if its entry can be
+ replaced by other inline clone. */
+ cgraph_node *find_replacement (void);
+
+ /* Create a new cgraph node which is the new version of
+ callgraph node. REDIRECT_CALLERS holds the callers
+ edges which should be redirected to point to
+ NEW_VERSION. ALL the callees edges of the node
+ are cloned to the new version node. Return the new
+ version node.
+
+ If non-NULL BLOCK_TO_COPY determine what basic blocks
+ was copied to prevent duplications of calls that are dead
+ in the clone. */
+
+ cgraph_node *create_version_clone (tree new_decl,
+ vec<cgraph_edge *> redirect_callers,
+ bitmap bbs_to_copy);
+
+ /* Perform function versioning.
+ Function versioning includes copying of the tree and
+ a callgraph update (creating a new cgraph node and updating
+ its callees and callers).
+
+ REDIRECT_CALLERS varray includes the edges to be redirected
+ to the new version.
+
+ TREE_MAP is a mapping of tree nodes we want to replace with
+ new ones (according to results of prior analysis).
+
+ If non-NULL ARGS_TO_SKIP determine function parameters to remove
+ from new version.
+ If SKIP_RETURN is true, the new version will return void.
+ If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
+ If non_NULL NEW_ENTRY determine new entry BB of the clone.
+
+ Return the new version's cgraph node. */
+ cgraph_node *create_version_clone_with_body
+ (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
+ bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
+ const char *clone_name);
+
+ /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
+ corresponding to cgraph_node. */
+ struct cgraph_function_version_info *insert_new_function_version (void);
+
+ /* Get the cgraph_function_version_info node corresponding to node. */
+ struct cgraph_function_version_info *function_version (void);
+
+ /* Discover all functions and variables that are trivially needed, analyze
+ them as well as all functions and variables referred by them. */
+ void analyze (void);
+
+ /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
+ aliases DECL with adjustments made to the first parameter.
+ See comments in thunk_adjust for detail on the parameters. */
+ cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
+ HOST_WIDE_INT fixed_offset,
+ HOST_WIDE_INT virtual_value,
+ tree virtual_offset,
+ tree real_alias);
+
+
+ /* Return node that alias is aliasing. */
+ inline cgraph_node *get_alias_target (void);
+
+ /* Given function symbol, walk the alias chain to return the function node
+ is alias of. Do not walk through thunks.
+ When AVAILABILITY is non-NULL, get minimal availability in the chain. */
+
+ cgraph_node *ultimate_alias_target (availability *availability = NULL);
+
+ /* Expand thunk NODE to gimple if possible.
+ When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
+ no assembler is produced.
+ When OUTPUT_ASM_THUNK is true, also produce assembler for
+ thunks that are not lowered. */
+ bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
+
+ /* As an GCC extension we allow redefinition of the function. The
+ semantics when both copies of bodies differ is not well defined.
+ We replace the old body with new body so in unit at a time mode
+ we always use new body, while in normal mode we may end up with
+ old body inlined into some functions and new body expanded and
+ inlined in others. */
+ void reset (void);
+
+ /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
+ kind of wrapper method. */
+ void create_wrapper (cgraph_node *target);
+
+ /* Verify cgraph nodes of the cgraph node. */
+ void DEBUG_FUNCTION verify_node (void);
+
+ /* Remove function from symbol table. */
+ void remove (void);
+
+ /* Dump call graph node to file F. */
+ void dump (FILE *f);
+
+ /* Dump call graph node to stderr. */
+ void DEBUG_FUNCTION debug (void);
+
+ /* When doing LTO, read cgraph_node's body from disk if it is not already
+ present. */
+ bool get_body (void);
+
+ /* Release memory used to represent body of function.
+ Use this only for functions that are released before being translated to
+ target code (i.e. RTL). Functions that are compiled to RTL and beyond
+ are free'd in final.c via free_after_compilation(). */
+ void release_body (void);
+
+ /* cgraph_node is no longer nested function; update cgraph accordingly. */
+ void unnest (void);
+
+ /* Bring cgraph node local. */
+ void make_local (void);
+
+ /* Likewise indicate that a node is having address taken. */
+ void mark_address_taken (void);
+
+ /* Set finalization priority to PRIORITY. */
+ void set_fini_priority (priority_type priority);
+
+ /* Return the finalization priority. */
+ priority_type get_fini_priority (void);
+
+ /* Create edge from a given function to CALLEE in the cgraph. */
+ struct cgraph_edge *create_edge (cgraph_node *callee,
+ gimple call_stmt, gcov_type count,
+ int freq);
+ /* Create an indirect edge with a yet-undetermined callee where the call
+ statement destination is a formal parameter of the caller with index
+ PARAM_INDEX. */
+ struct cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
+ gcov_type count, int freq);
+
+ /* Like cgraph_create_edge walk the clone tree and update all clones sharing
+ same function body. If clones already have edge for OLD_STMT; only
+ update the edge same way as cgraph_set_call_stmt_including_clones does. */
+ void create_edge_including_clones (struct cgraph_node *callee,
+ gimple old_stmt, gimple stmt,
+ gcov_type count,
+ int freq,
+ cgraph_inline_failed_t reason);
+
+ /* Return the callgraph edge representing the GIMPLE_CALL statement
+ CALL_STMT. */
+ cgraph_edge *get_edge (gimple call_stmt);
+
+ /* Collect all callers of cgraph_node and its aliases that are known to lead
+ to NODE (i.e. are not overwritable). */
+ vec<cgraph_edge *> collect_callers (void);
+
+ /* Remove all callers from the node. */
+ void remove_callers (void);
+
+ /* Remove all callees from the node. */
+ void remove_callees (void);
+
+ /* Return function availability. See cgraph.h for description of individual
+ return values. */
+ enum availability get_availability (void);
+
+ /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
+ if any to NOTHROW. */
+ void set_nothrow_flag (bool nothrow);
+
+ /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
+ if any to READONLY. */
+ void set_const_flag (bool readonly, bool looping);
+
+ /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
+ if any to PURE. */
+ void set_pure_flag (bool pure, bool looping);
+
+ /* Call all node duplication hooks. */
+ void call_duplication_hooks (cgraph_node *node2);
+
+ /* Call callback on function and aliases associated to the function.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+
+ bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
+ void *),
+ void *data, bool include_overwritable);
+
+ /* Call callback on cgraph_node, thunks and aliases associated to NODE.
+ When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+ skipped. */
+ bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
+ void *data),
+ void *data,
+ bool include_overwritable);
+
+ /* Call all node insertion hooks. */
+ void call_function_insertion_hooks (void);
+
+ /* Likewise indicate that a node is needed, i.e. reachable via some
+ external means. */
+ inline void mark_force_output (void);
+
+ /* Return true when function can be marked local. */
+ bool local_p (void);
+
+ /* Return true if cgraph_node can be made local for API change.
+ Extern inline functions and C++ COMDAT functions can be made local
+ at the expense of possible code size growth if function is used in multiple
+ compilation units. */
+ bool can_be_local_p (void);
+
+ /* Return true when cgraph_node can not return or throw and thus
+ it is safe to ignore its side effects for IPA analysis. */
+ bool cannot_return_p (void);
+
+ /* Return true when function cgraph_node and all its aliases are only called
+ directly.
+ i.e. it is not externally visible, address was not taken and
+ it is not used in any other non-standard way. */
+ bool only_called_directly_p (void);
+
+ /* Return true when function is only called directly or it has alias.
+ i.e. it is not externally visible, address was not taken and
+ it is not used in any other non-standard way. */
+ inline bool only_called_directly_or_aliased_p (void);
+
+ /* Return true when function cgraph_node can be expected to be removed
+ from program when direct calls in this compilation unit are removed.
+
+ As a special case COMDAT functions are
+ cgraph_can_remove_if_no_direct_calls_p while they are not
+ cgraph_only_called_directly_p (it is possible they are called from other
+ unit)
+
+ This function behaves as cgraph_only_called_directly_p because eliminating
+ all uses of COMDAT function does not make it necessarily disappear from
+ the program unless we are compiling whole program or we do LTO. In this
+ case we know we win since dynamic linking will not really discard the
+ linkonce section. */
+ bool will_be_removed_from_program_if_no_direct_calls_p (void);
+
+ /* Return true when function can be removed from callgraph
+ if all direct calls are eliminated. */
+ bool can_remove_if_no_direct_calls_and_refs_p (void);
+
+ /* Return true when function cgraph_node and its aliases can be removed from
+ callgraph if all direct calls are eliminated. */
+ bool can_remove_if_no_direct_calls_p (void);
+
+ /* Return true when callgraph node is a function with Gimple body defined
+ in current unit. Functions can also be defined externally or they
+ can be thunks with no Gimple representation.
+
+ Note that at WPA stage, the function body may not be present in memory. */
+ inline bool has_gimple_body_p (void);
+
+ /* Return true if function should be optimized for size. */
+ bool optimize_for_size_p (void);
+
+ /* Dump the callgraph to file F. */
+ static void dump_cgraph (FILE *f);
+
+ /* Dump the call graph to stderr. */
+ static inline void debug_cgraph (void)
+ {
+ dump_cgraph (stderr);
+ }
+
+ /* Record that DECL1 and DECL2 are semantically identical function
+ versions. */
+ static void record_function_versions (tree decl1, tree decl2);
+
+ /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
+ DECL is a duplicate declaration. */
+ static void delete_function_version (tree decl);
+
+ /* Add the function FNDECL to the call graph.
+ Unlike cgraph_finalize_function, this function is intended to be used
+ by middle end and allows insertion of new function at arbitrary point
+ of compilation. The function can be either in high, low or SSA form
+ GIMPLE.
+
+ The function is assumed to be reachable and have address taken (so no
+ API breaking optimizations are performed on it).
+
+ Main work done by this function is to enqueue the function for later
+ processing to avoid need the passes to be re-entrant. */
+ static void add_new_function (tree fndecl, bool lowered);
+
+ /* Return callgraph node for given symbol and check it is a function. */
+ static inline cgraph_node *get (const_tree decl)
+ {
+ gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
+ return dyn_cast <cgraph_node *> (symtab_node::get (decl));
+ }
+
+ /* Return cgraph node assigned to DECL. Create new one when needed. */
+ static cgraph_node * create (tree decl);
+
+ /* Allocate new callgraph node and insert it into basic data structures. */
+ static cgraph_node * create_empty (void);
+
+ /* Try to find a call graph node for declaration DECL and if it does not
+ exist or if it corresponds to an inline clone, create a new one. */
+ static cgraph_node * get_create (tree);
+
+ /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
+ Return NULL if there's no such node. */
+ static cgraph_node *get_for_asmname (tree asmname);
+
+ /* Attempt to mark ALIAS as an alias to DECL. Return alias node if
+ successful and NULL otherwise.
+ Same body aliases are output whenever the body of DECL is output,
+ and cgraph_node::get (ALIAS) transparently
+ returns cgraph_node::get (DECL). */
+ static cgraph_node * create_same_body_alias (tree alias, tree decl);
+
+ /* Worker for cgraph_node::used_from_object_file_p. */
+ static bool used_from_object_file_p_worker (cgraph_node *node, void *)
+ {
+ return node->used_from_object_file_p ();
+ }
+
+ /* Return true when cgraph_node can not be local.
+ Worker for cgraph_local_node_p. */
+ static bool non_local_p (cgraph_node *node, void *);
+
+ /* Verify whole cgraph structure. */
+ static void DEBUG_FUNCTION verify_cgraph_nodes (void);
+
+ /* Worker to bring NODE local. */
+ static bool make_local (cgraph_node *node, void *);
+
+ /* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
+ the function body is associated
+ with (not necessarily cgraph_node (DECL)). */
+ static cgraph_node *create_alias (tree alias, tree target);
+
+ static cgraph_edge * create_edge (cgraph_node *caller, cgraph_node *callee,
+ gimple call_stmt, gcov_type count,
+ int freq,
+ bool indir_unknown_callee);
+
struct cgraph_edge *callees;
struct cgraph_edge *callers;
/* List of edges representing indirect calls with a yet undetermined
callee. */
struct cgraph_edge *indirect_calls;
/* For nested functions points to function the node is nested in. */
- struct cgraph_node *origin;
+ cgraph_node *origin;
/* Points to first nested function, if any. */
- struct cgraph_node *nested;
+ cgraph_node *nested;
/* Pointer to the next function with same origin, if any. */
- struct cgraph_node *next_nested;
+ cgraph_node *next_nested;
/* Pointer to the next clone. */
- struct cgraph_node *next_sibling_clone;
- struct cgraph_node *prev_sibling_clone;
- struct cgraph_node *clones;
- struct cgraph_node *clone_of;
+ cgraph_node *next_sibling_clone;
+ cgraph_node *prev_sibling_clone;
+ cgraph_node *clones;
+ cgraph_node *clone_of;
/* For functions with many calls sites it holds map from call expression
to the edge to speed up cgraph_edge function. */
htab_t GTY((param_is (struct cgraph_edge))) call_site_hash;
information for it. */
struct cgraph_simd_clone *simdclone;
/* If this function has SIMD clones, this points to the first clone. */
- struct cgraph_node *simd_clones;
+ cgraph_node *simd_clones;
/* Interprocedural passes scheduled to have their transform functions
applied next time we execute local pass on them. We maintain it
/* True if this decl calls a COMDAT-local function. This is set up in
compute_inline_parameters and inline_call. */
unsigned calls_comdat_local : 1;
-
- void set_fini_priority (priority_type priority);
- priority_type get_fini_priority ();
-};
-
-
-typedef struct cgraph_node *cgraph_node_ptr;
-
-
-/* Function Multiversioning info. */
-struct GTY(()) cgraph_function_version_info {
- /* The cgraph_node for which the function version info is stored. */
- struct cgraph_node *this_node;
- /* Chains all the semantically identical function versions. The
- first function in this chain is the version_info node of the
- default function. */
- struct cgraph_function_version_info *prev;
- /* If this version node corresponds to a dispatcher for function
- versions, this points to the version info node of the default
- function, the first node in the chain. */
- struct cgraph_function_version_info *next;
- /* If this node corresponds to a function version, this points
- to the dispatcher function decl, which is the function that must
- be called to execute the right function version at run-time.
-
- If this cgraph node is a dispatcher (if dispatcher_function is
- true, in the cgraph_node struct) for function versions, this
- points to resolver function, which holds the function body of the
- dispatcher. The dispatcher decl is an alias to the resolver
- function decl. */
- tree dispatcher_resolver;
};
-/* Get the cgraph_function_version_info node corresponding to node. */
-struct cgraph_function_version_info *
- get_cgraph_node_version (struct cgraph_node *node);
-
-/* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
- corresponding to cgraph_node NODE. */
-struct cgraph_function_version_info *
- insert_new_cgraph_node_version (struct cgraph_node *node);
-
-/* Record that DECL1 and DECL2 are semantically identical function
- versions. */
-void record_function_versions (tree decl1, tree decl2);
-
-/* Remove the cgraph_function_version_info and cgraph_node for DECL. This
- DECL is a duplicate declaration. */
-void delete_function_version (tree decl);
-
/* A cgraph node set is a collection of cgraph nodes. A cgraph node
can appear in multiple sets. */
struct cgraph_node_set_def
{
struct pointer_map_t *map;
- vec<cgraph_node_ptr> nodes;
+ vec<cgraph_node *> nodes;
};
-class varpool_node;
-typedef varpool_node *varpool_node_ptr;
+typedef cgraph_node_set_def *cgraph_node_set;
+typedef struct varpool_node_set_def *varpool_node_set;
+class varpool_node;
/* A varpool node set is a collection of varpool nodes. A varpool node
can appear in multiple sets. */
struct varpool_node_set_def
{
struct pointer_map_t * map;
- vec<varpool_node_ptr> nodes;
+ vec<varpool_node *> nodes;
};
-typedef struct cgraph_node_set_def *cgraph_node_set;
-
-
-typedef struct varpool_node_set_def *varpool_node_set;
-
-
/* Iterator structure for cgraph node sets. */
struct cgraph_node_set_iterator
{
unsigned index;
};
-#define DEFCIFCODE(code, type, string) CIF_ ## code,
-/* Reasons for inlining failures. */
-enum cgraph_inline_failed_t {
-#include "cif-code.def"
- CIF_N_REASONS
-};
-
-enum cgraph_inline_failed_type_t
-{
- CIF_FINAL_NORMAL = 0,
- CIF_FINAL_ERROR
-};
-
/* Structure containing additional information about an indirect call. */
struct GTY(()) cgraph_indirect_call_info
struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"))) cgraph_edge {
/* Expected number of executions: calculated in profile.c. */
gcov_type count;
- struct cgraph_node *caller;
- struct cgraph_node *callee;
+ cgraph_node *caller;
+ cgraph_node *callee;
struct cgraph_edge *prev_caller;
struct cgraph_edge *next_caller;
struct cgraph_edge *prev_callee;
#define CGRAPH_FREQ_BASE 1000
#define CGRAPH_FREQ_MAX 100000
-typedef struct cgraph_edge *cgraph_edge_p;
-
-
/* The varpool data structure.
Each static variable decl has assigned varpool_node. */
in places where optimization would be valid for local static variable
if we did not do any inter-procedural code movement. */
unsigned used_by_single_function : 1;
+
+ /* Dump given cgraph node to F. */
+ void dump (FILE *f);
+
+ /* Remove variable from symbol table. */
+ void remove (void);
};
/* Every top level asm statement is put into a asm_node. */
inline bool
is_a_helper <cgraph_node *>::test (symtab_node *p)
{
- return p->type == SYMTAB_FUNCTION;
+ return p && p->type == SYMTAB_FUNCTION;
}
/* Report whether or not THIS symtab node is a vriable, aka varpool_node. */
inline bool
is_a_helper <varpool_node *>::test (symtab_node *p)
{
- return p->type == SYMTAB_VARIABLE;
+ return p && p->type == SYMTAB_VARIABLE;
}
extern GTY(()) symtab_node *symtab_nodes;
extern GTY(()) int symtab_order;
extern bool cpp_implicit_aliases_done;
-/* Classifcation of symbols WRT partitioning. */
-enum symbol_partitioning_class
-{
- /* External declarations are ignored by partitioning algorithms and they are
- added into the boundary later via compute_ltrans_boundary. */
- SYMBOL_EXTERNAL,
- /* Partitioned symbols are pur into one of partitions. */
- SYMBOL_PARTITION,
- /* Duplicated symbols (such as comdat or constant pool references) are
- copied into every node needing them via add_symbol_to_partition. */
- SYMBOL_DUPLICATE
-};
-
-
/* In symtab.c */
-void symtab_register_node (symtab_node *);
-void symtab_unregister_node (symtab_node *);
-void symtab_remove_from_same_comdat_group (symtab_node *);
-void symtab_remove_node (symtab_node *);
symtab_node *symtab_node_for_asm (const_tree asmname);
-void symtab_add_to_same_comdat_group (symtab_node *, symtab_node *);
-void symtab_dissolve_same_comdat_group_list (symtab_node *node);
-void dump_symtab (FILE *);
-void debug_symtab (void);
-void dump_symtab_node (FILE *, symtab_node *);
-void debug_symtab_node (symtab_node *);
-void dump_symtab_base (FILE *, symtab_node *);
-void verify_symtab (void);
-void verify_symtab_node (symtab_node *);
-bool verify_symtab_base (symtab_node *);
-bool symtab_used_from_object_file_p (symtab_node *);
-void symtab_make_decl_local (tree);
-symtab_node *symtab_alias_ultimate_target (symtab_node *,
- enum availability *avail = NULL);
-bool symtab_resolve_alias (symtab_node *node, symtab_node *target);
-void fixup_same_cpp_alias_visibility (symtab_node *node, symtab_node *target);
-bool symtab_for_node_and_aliases (symtab_node *,
- bool (*) (symtab_node *, void *),
- void *,
- bool);
-symtab_node *symtab_nonoverwritable_alias (symtab_node *);
-enum availability symtab_node_availability (symtab_node *);
-bool symtab_semantically_equivalent_p (symtab_node *, symtab_node *);
-enum symbol_partitioning_class symtab_get_symbol_partitioning_class (symtab_node *);
/* In cgraph.c */
-void dump_cgraph (FILE *);
-void debug_cgraph (void);
-void dump_cgraph_node (FILE *, struct cgraph_node *);
-void debug_cgraph_node (struct cgraph_node *);
-void cgraph_remove_edge (struct cgraph_edge *);
-void cgraph_remove_node (struct cgraph_node *);
-void cgraph_release_function_body (struct cgraph_node *);
void release_function_body (tree);
-void cgraph_node_remove_callees (struct cgraph_node *node);
-struct cgraph_edge *cgraph_create_edge (struct cgraph_node *,
- struct cgraph_node *,
- gimple, gcov_type, int);
-struct cgraph_edge *cgraph_create_indirect_edge (struct cgraph_node *, gimple,
- int, gcov_type, int);
struct cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
-struct cgraph_node * cgraph_create_node (tree);
-struct cgraph_node * cgraph_create_empty_node (void);
-struct cgraph_node * cgraph_get_create_node (tree);
-struct cgraph_node * cgraph_same_body_alias (struct cgraph_node *, tree, tree);
-struct cgraph_node * cgraph_add_thunk (struct cgraph_node *, tree, tree, bool, HOST_WIDE_INT,
- HOST_WIDE_INT, tree, tree);
-struct cgraph_node *cgraph_node_for_asm (tree);
-struct cgraph_edge *cgraph_edge (struct cgraph_node *, gimple);
+void cgraph_remove_edge (struct cgraph_edge *);
+
void cgraph_set_call_stmt (struct cgraph_edge *, gimple, bool update_speculative = true);
void cgraph_update_edges_for_call_stmt (gimple, tree, gimple);
struct cgraph_local_info *cgraph_local_info (tree);
struct cgraph_global_info *cgraph_global_info (tree);
struct cgraph_rtl_info *cgraph_rtl_info (tree);
-struct cgraph_node *cgraph_create_function_alias (tree, tree);
-void cgraph_call_node_duplication_hooks (struct cgraph_node *,
- struct cgraph_node *);
void cgraph_call_edge_duplication_hooks (struct cgraph_edge *,
struct cgraph_edge *);
-void cgraph_redirect_edge_callee (struct cgraph_edge *, struct cgraph_node *);
-struct cgraph_edge *cgraph_make_edge_direct (struct cgraph_edge *, struct cgraph_node *);
-bool cgraph_only_called_directly_p (struct cgraph_node *);
-
bool cgraph_function_possibly_inlined_p (tree);
-void cgraph_unnest_node (struct cgraph_node *);
+bool cgraph_edge_cannot_lead_to_return (struct cgraph_edge *);
+void cgraph_redirect_edge_callee (struct cgraph_edge *, cgraph_node *);
+struct cgraph_edge *cgraph_make_edge_direct (struct cgraph_edge *,
+ cgraph_node *);
-enum availability cgraph_function_body_availability (struct cgraph_node *);
-void cgraph_add_new_function (tree, bool);
const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
-void cgraph_set_nothrow_flag (struct cgraph_node *, bool);
-void cgraph_set_const_flag (struct cgraph_node *, bool, bool);
-void cgraph_set_pure_flag (struct cgraph_node *, bool, bool);
-bool cgraph_node_cannot_return (struct cgraph_node *);
-bool cgraph_edge_cannot_lead_to_return (struct cgraph_edge *);
-bool cgraph_will_be_removed_from_program_if_no_direct_calls
- (struct cgraph_node *node);
-bool cgraph_can_remove_if_no_direct_calls_and_refs_p
- (struct cgraph_node *node);
-bool cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node);
bool resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution);
-bool cgraph_for_node_thunks_and_aliases (struct cgraph_node *,
- bool (*) (struct cgraph_node *, void *),
- void *,
- bool);
-bool cgraph_for_node_and_aliases (struct cgraph_node *,
- bool (*) (struct cgraph_node *, void *),
- void *, bool);
-vec<cgraph_edge_p> collect_callers_of_node (struct cgraph_node *node);
-void verify_cgraph (void);
-void verify_cgraph_node (struct cgraph_node *);
-void cgraph_mark_address_taken_node (struct cgraph_node *);
-
typedef void (*cgraph_edge_hook)(struct cgraph_edge *, void *);
-typedef void (*cgraph_node_hook)(struct cgraph_node *, void *);
+typedef void (*cgraph_node_hook)(cgraph_node *, void *);
typedef void (*varpool_node_hook)(varpool_node *, void *);
typedef void (*cgraph_2edge_hook)(struct cgraph_edge *, struct cgraph_edge *,
void *);
-typedef void (*cgraph_2node_hook)(struct cgraph_node *, struct cgraph_node *,
+typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *,
void *);
struct cgraph_edge_hook_list;
struct cgraph_node_hook_list;
struct cgraph_2node_hook_list;
struct cgraph_edge_hook_list *cgraph_add_edge_removal_hook (cgraph_edge_hook, void *);
void cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *);
-struct cgraph_node_hook_list *cgraph_add_node_removal_hook (cgraph_node_hook,
+cgraph_node_hook_list *cgraph_add_node_removal_hook (cgraph_node_hook,
void *);
-void cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *);
+void cgraph_remove_node_removal_hook (cgraph_node_hook_list *);
struct varpool_node_hook_list *varpool_add_node_removal_hook (varpool_node_hook,
void *);
void varpool_remove_node_removal_hook (struct varpool_node_hook_list *);
-struct cgraph_node_hook_list *cgraph_add_function_insertion_hook (cgraph_node_hook,
+cgraph_node_hook_list *cgraph_add_function_insertion_hook (cgraph_node_hook,
void *);
-void cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *);
+void cgraph_remove_function_insertion_hook (cgraph_node_hook_list *);
struct varpool_node_hook_list *varpool_add_variable_insertion_hook (varpool_node_hook,
void *);
void varpool_remove_variable_insertion_hook (struct varpool_node_hook_list *);
-void cgraph_call_function_insertion_hooks (struct cgraph_node *node);
struct cgraph_2edge_hook_list *cgraph_add_edge_duplication_hook (cgraph_2edge_hook, void *);
void cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *);
struct cgraph_2node_hook_list *cgraph_add_node_duplication_hook (cgraph_2node_hook, void *);
void cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *);
gimple cgraph_redirect_edge_call_stmt_to_callee (struct cgraph_edge *);
-struct cgraph_node * cgraph_function_node (struct cgraph_node *,
- enum availability *avail = NULL);
-bool cgraph_get_body (struct cgraph_node *node);
struct cgraph_edge *
cgraph_turn_edge_to_speculative (struct cgraph_edge *,
- struct cgraph_node *,
+ cgraph_node *,
gcov_type, int);
void cgraph_speculative_call_info (struct cgraph_edge *,
struct cgraph_edge *&,
void init_cgraph (void);
void cgraph_process_new_functions (void);
void cgraph_process_same_body_aliases (void);
-void fixup_same_cpp_alias_visibility (symtab_node *, symtab_node *target, tree);
/* Initialize datastructures so DECL is a function in lowered gimple form.
IN_SSA is true if the gimple is in SSA. */
basic_block init_lowered_empty_function (tree, bool);
-void cgraph_reset_node (struct cgraph_node *);
-bool expand_thunk (struct cgraph_node *, bool, bool);
-void cgraph_make_wrapper (struct cgraph_node *source,
- struct cgraph_node *target);
/* In cgraphclones.c */
struct cgraph_edge * cgraph_clone_edge (struct cgraph_edge *,
- struct cgraph_node *, gimple,
+ cgraph_node *, gimple,
unsigned, gcov_type, int, bool);
-struct cgraph_node * cgraph_clone_node (struct cgraph_node *, tree, gcov_type,
- int, bool, vec<cgraph_edge_p>,
- bool, struct cgraph_node *, bitmap);
tree clone_function_name (tree decl, const char *);
-struct cgraph_node * cgraph_create_virtual_clone (struct cgraph_node *old_node,
- vec<cgraph_edge_p>,
- vec<ipa_replace_map_p, va_gc> *tree_map,
- bitmap args_to_skip,
- const char *clone_name);
-struct cgraph_node *cgraph_find_replacement_node (struct cgraph_node *);
-bool cgraph_remove_node_and_inline_clones (struct cgraph_node *, struct cgraph_node *);
-void cgraph_set_call_stmt_including_clones (struct cgraph_node *, gimple, gimple,
- bool update_speculative = true);
-void cgraph_create_edge_including_clones (struct cgraph_node *,
- struct cgraph_node *,
- gimple, gimple, gcov_type, int,
- cgraph_inline_failed_t);
+
void cgraph_materialize_all_clones (void);
-struct cgraph_node * cgraph_copy_node_for_versioning (struct cgraph_node *,
- tree, vec<cgraph_edge_p>, bitmap);
-struct cgraph_node *cgraph_function_versioning (struct cgraph_node *,
- vec<cgraph_edge_p>,
- vec<ipa_replace_map_p, va_gc> *,
- bitmap, bool, bitmap,
- basic_block, const char *);
-void tree_function_versioning (tree, tree, vec<ipa_replace_map_p, va_gc> *,
+void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
bool, bitmap, bool, bitmap, basic_block);
struct cgraph_edge *cgraph_resolve_speculation (struct cgraph_edge *, tree);
void cgraph_rebuild_references (void);
int compute_call_stmt_bb_frequency (tree, basic_block bb);
void record_references_in_initializer (tree, bool);
-void ipa_record_stmt_references (struct cgraph_node *, gimple);
/* In ipa.c */
bool symtab_remove_unreachable_nodes (bool, FILE *);
cgraph_node_set cgraph_node_set_new (void);
cgraph_node_set_iterator cgraph_node_set_find (cgraph_node_set,
- struct cgraph_node *);
-void cgraph_node_set_add (cgraph_node_set, struct cgraph_node *);
-void cgraph_node_set_remove (cgraph_node_set, struct cgraph_node *);
+ cgraph_node *);
+void cgraph_node_set_add (cgraph_node_set, cgraph_node *);
+void cgraph_node_set_remove (cgraph_node_set, cgraph_node *);
void dump_cgraph_node_set (FILE *, cgraph_node_set);
void debug_cgraph_node_set (cgraph_node_set);
void free_cgraph_node_set (cgraph_node_set);
void ipa_discover_readonly_nonaddressable_vars (void);
bool varpool_externally_visible_p (varpool_node *);
-/* In ipa-visibility.c */
-bool cgraph_local_node_p (struct cgraph_node *);
-bool address_taken_from_non_vtable_p (symtab_node *node);
-
-
/* In predict.c */
bool cgraph_maybe_hot_edge_p (struct cgraph_edge *e);
-bool cgraph_optimize_for_size_p (struct cgraph_node *);
/* In varpool.c */
varpool_node *varpool_create_empty_node (void);
void varpool_mark_needed_node (varpool_node *);
void debug_varpool (void);
void dump_varpool (FILE *);
-void dump_varpool_node (FILE *, varpool_node *);
void varpool_finalize_decl (tree);
enum availability cgraph_variable_initializer_availability (varpool_node *);
-void cgraph_make_node_local (struct cgraph_node *);
-bool cgraph_node_can_be_local_p (struct cgraph_node *);
-
-void varpool_remove_node (varpool_node *node);
void varpool_finalize_named_section_flags (varpool_node *node);
bool varpool_output_variables (void);
bool varpool_assemble_decl (varpool_node *node);
/* In cgraph.c */
extern void change_decl_assembler_name (tree, tree);
+/* Return true when the symbol is real symbol, i.e. it is not inline clone
+ or abstract function kept for debug info purposes only. */
+inline bool
+symtab_node::real_symbol_p (void)
+{
+ cgraph_node *cnode;
+
+ if (DECL_ABSTRACT (decl))
+ return false;
+ if (!is_a <cgraph_node *> (this))
+ return true;
+ cnode = dyn_cast <cgraph_node *> (this);
+ if (cnode->global.inlined_to)
+ return false;
+ return true;
+}
+
/* Return true if DECL should have entry in symbol table if used.
Those are functions and static & external veriables*/
&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
}
-/* Return symbol table node associated with DECL, if any,
- and NULL otherwise. */
-
-static inline symtab_node *
-symtab_get_node (const_tree decl)
+inline bool
+symtab_node::in_same_comdat_group_p (symtab_node *target)
{
-#ifdef ENABLE_CHECKING
- gcc_checking_assert (decl_in_symtab_p (decl));
- /* Check that the mapping is sane - perhaps this check can go away,
- but at the moment frontends tends to corrupt the mapping by calling
- memcpy/memset on the tree nodes. */
- gcc_checking_assert (!decl->decl_with_vis.symtab_node
- || decl->decl_with_vis.symtab_node->decl == decl);
-#endif
- return decl->decl_with_vis.symtab_node;
-}
+ symtab_node *source = this;
-/* Return callgraph node for given symbol and check it is a function. */
-static inline struct cgraph_node *
-cgraph (symtab_node *node)
-{
- gcc_checking_assert (!node || node->type == SYMTAB_FUNCTION);
- return (struct cgraph_node *)node;
+ /* An inline clone shares its COMDAT group with the function it was
+ inlined into; resolve THIS (source) first, then TARGET. The first
+ dyn_cast must inspect SOURCE, not TARGET — otherwise this node's
+ own inline-clone case is never handled. */
+ if (cgraph_node *cn = dyn_cast <cgraph_node *> (source))
+ {
+ if (cn->global.inlined_to)
+ source = cn->global.inlined_to;
+ }
+ if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
+ {
+ if (cn->global.inlined_to)
+ target = cn->global.inlined_to;
+ }
+
+ return source->get_comdat_group () == target->get_comdat_group ();
}
-/* Return varpool node for given symbol and check it is a variable. */
-static inline varpool_node *
-varpool (symtab_node *node)
+/* Return node that alias is aliasing. */
+
+inline symtab_node *
+symtab_node::get_alias_target (void)
{
- gcc_checking_assert (!node || node->type == SYMTAB_VARIABLE);
- return (varpool_node *)node;
+ struct ipa_ref *ref = NULL;
+ iterate_reference (0, ref);
+ gcc_checking_assert (ref->use == IPA_REF_ALIAS);
+ return ref->referred;
}
-/* Return callgraph node for given symbol and check it is a function. */
-static inline struct cgraph_node *
-cgraph_get_node (const_tree decl)
+/* Return next reachable static symbol with initializer after the node. */
+inline symtab_node *
+symtab_node::next_defined_symbol (void)
{
- gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
- return cgraph (symtab_get_node (decl));
+ symtab_node *node1 = next;
+
+ for (; node1; node1 = node1->next)
+ if (node1->definition)
+ return node1;
+
+ return NULL;
}
/* Return varpool node for given symbol and check it is a function. */
varpool_get_node (const_tree decl)
{
gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
- return varpool (symtab_get_node (decl));
+ return dyn_cast<varpool_node *> (symtab_node::get (decl));
}
/* Walk all symbols. */
return NULL;
}
-/* Return next reachable static symbol with initializer after NODE. */
-static inline symtab_node *
-symtab_next_defined_symbol (symtab_node *node)
-{
- symtab_node *node1 = node->next;
-
- for (; node1; node1 = node1->next)
- if (node1->definition)
- return node1;
-
- return NULL;
-}
/* Walk all symbols with definitions in current unit. */
#define FOR_EACH_DEFINED_SYMBOL(node) \
for ((node) = symtab_first_defined_symbol (); (node); \
- (node) = symtab_next_defined_symbol (node))
+ (node) = node->next_defined_symbol ())
/* Return first variable. */
static inline varpool_node *
(node) = varpool_next_defined_variable (node))
/* Return first function with body defined. */
-static inline struct cgraph_node *
+static inline cgraph_node *
cgraph_first_defined_function (void)
{
symtab_node *node;
}
/* Return next function with body defined after NODE. */
-static inline struct cgraph_node *
-cgraph_next_defined_function (struct cgraph_node *node)
+static inline cgraph_node *
+cgraph_next_defined_function (cgraph_node *node)
{
symtab_node *node1 = node->next;
for (; node1; node1 = node1->next)
(node) = cgraph_next_defined_function ((node)))
/* Return first function. */
-static inline struct cgraph_node *
+static inline cgraph_node *
cgraph_first_function (void)
{
symtab_node *node;
}
/* Return next function. */
-static inline struct cgraph_node *
-cgraph_next_function (struct cgraph_node *node)
+static inline cgraph_node *
+cgraph_next_function (cgraph_node *node)
{
symtab_node *node1 = node->next;
for (; node1; node1 = node1->next)
for ((node) = cgraph_first_function (); (node); \
(node) = cgraph_next_function ((node)))
-/* Return true when NODE is a function with Gimple body defined
+/* Return true when callgraph node is a function with Gimple body defined
in current unit. Functions can also be define externally or they
can be thunks with no Gimple representation.
Note that at WPA stage, the function body may not be present in memory. */
-static inline bool
-cgraph_function_with_gimple_body_p (struct cgraph_node *node)
+inline bool
+cgraph_node::has_gimple_body_p (void)
{
- return node->definition && !node->thunk.thunk_p && !node->alias;
+ return definition && !thunk.thunk_p && !alias;
}
/* Return first function with body defined. */
-static inline struct cgraph_node *
+static inline cgraph_node *
cgraph_first_function_with_gimple_body (void)
{
symtab_node *node;
for (node = symtab_nodes; node; node = node->next)
{
cgraph_node *cn = dyn_cast <cgraph_node *> (node);
- if (cn && cgraph_function_with_gimple_body_p (cn))
+ if (cn && cn->has_gimple_body_p ())
return cn;
}
return NULL;
}
/* Return next reachable static variable with initializer after NODE. */
-static inline struct cgraph_node *
-cgraph_next_function_with_gimple_body (struct cgraph_node *node)
+static inline cgraph_node *
+cgraph_next_function_with_gimple_body (cgraph_node *node)
{
symtab_node *node1 = node->next;
for (; node1; node1 = node1->next)
{
cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
- if (cn1 && cgraph_function_with_gimple_body_p (cn1))
+ if (cn1 && cn1->has_gimple_body_p ())
return cn1;
}
return NULL;
}
/* Return the node pointed to by CSI. */
-static inline struct cgraph_node *
+static inline cgraph_node *
csi_node (cgraph_node_set_iterator csi)
{
return csi.set->nodes[csi.index];
/* Return true if SET contains NODE. */
static inline bool
-cgraph_node_in_set_p (struct cgraph_node *node, cgraph_node_set set)
+cgraph_node_in_set_p (cgraph_node *node, cgraph_node_set set)
{
cgraph_node_set_iterator csi;
csi = cgraph_node_set_find (set, node);
return vsi;
}
-/* Return true if SET contains NODE. */
-static inline bool
-varpool_node_in_set_p (varpool_node *node, varpool_node_set set)
-{
- varpool_node_set_iterator vsi;
- vsi = varpool_node_set_find (set, node);
- return !vsi_end_p (vsi);
-}
-
/* Return number of nodes in SET. */
static inline size_t
varpool_node_set_size (varpool_node_set set)
return !set->nodes.is_empty ();
}
-/* Return true when function NODE is only called directly or it has alias.
+/* Return true when function is only called directly or it has alias.
i.e. it is not externally visible, address was not taken and
it is not used in any other non-standard way. */
-static inline bool
-cgraph_only_called_directly_or_aliased_p (struct cgraph_node *node)
-{
- gcc_assert (!node->global.inlined_to);
- return (!node->force_output && !node->address_taken
- && !node->used_from_other_partition
- && !DECL_VIRTUAL_P (node->decl)
- && !DECL_STATIC_CONSTRUCTOR (node->decl)
- && !DECL_STATIC_DESTRUCTOR (node->decl)
- && !node->externally_visible);
+inline bool
+cgraph_node::only_called_directly_or_aliased_p (void)
+{
+ gcc_assert (!global.inlined_to);
+ return (!force_output && !address_taken
+ && !used_from_other_partition
+ && !DECL_VIRTUAL_P (decl)
+ && !DECL_STATIC_CONSTRUCTOR (decl)
+ && !DECL_STATIC_DESTRUCTOR (decl)
+ && !externally_visible);
}
/* Return true when function NODE can be removed from callgraph
return (!node->force_output && !node->used_from_other_partition
&& ((DECL_COMDAT (node->decl)
&& !node->forced_by_abi
- && !symtab_used_from_object_file_p (node))
+ && !node->used_from_object_file_p ())
|| !node->externally_visible
|| DECL_HAS_VALUE_EXPR_P (node->decl)));
}
/* Constant pool accessor function. */
htab_t constant_pool_htab (void);
-/* Return node that alias N is aliasing. */
-
-static inline symtab_node *
-symtab_alias_target (symtab_node *n)
-{
- struct ipa_ref *ref = NULL;
- n->iterate_reference (0, ref);
- gcc_checking_assert (ref->use == IPA_REF_ALIAS);
- return ref->referred;
-}
+/* Return node that alias is aliasing. */
-static inline struct cgraph_node *
-cgraph_alias_target (struct cgraph_node *n)
+inline cgraph_node *
+cgraph_node::get_alias_target (void)
{
- return dyn_cast <cgraph_node *> (symtab_alias_target (n));
+ return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
}
static inline varpool_node *
varpool_alias_target (varpool_node *n)
{
- return dyn_cast <varpool_node *> (symtab_alias_target (n));
+ return dyn_cast <varpool_node *> (n->get_alias_target ());
}
-/* Given NODE, walk the alias chain to return the function NODE is alias of.
- Do not walk through thunks.
+/* Given a function symbol, walk the alias chain to return the function
+ the node is an alias of.  Do not walk through thunks.
When AVAILABILITY is non-NULL, get minimal availability in the chain. */
-static inline struct cgraph_node *
-cgraph_function_or_thunk_node (struct cgraph_node *node,
- enum availability *availability = NULL)
+inline cgraph_node *
+cgraph_node::ultimate_alias_target (enum availability *availability)
{
- struct cgraph_node *n;
-
- n = dyn_cast <cgraph_node *> (symtab_alias_ultimate_target (node,
- availability));
+ cgraph_node *n = dyn_cast <cgraph_node *> (symtab_node::ultimate_alias_target
+ (availability));
if (!n && availability)
*availability = AVAIL_NOT_AVAILABLE;
return n;
varpool_node *n;
if (node)
- n = dyn_cast <varpool_node *> (symtab_alias_ultimate_target (node,
- availability));
+ n = dyn_cast <varpool_node *> (node->ultimate_alias_target (availability));
else
n = NULL;
static inline bool
cgraph_edge_recursive_p (struct cgraph_edge *e)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ cgraph_node *callee = e->callee->ultimate_alias_target ();
if (e->caller->global.inlined_to)
return e->caller->global.inlined_to->decl == callee->decl;
else
static inline bool
decl_is_tm_clone (const_tree fndecl)
{
- struct cgraph_node *n = cgraph_get_node (fndecl);
+ cgraph_node *n = cgraph_node::get (fndecl);
if (n)
return n->tm_clone;
return false;
/* Likewise indicate that a node is needed, i.e. reachable via some
external means. */
-static inline void
-cgraph_mark_force_output_node (struct cgraph_node *node)
-{
- node->force_output = 1;
- gcc_checking_assert (!node->global.inlined_to);
-}
-
-/* Return true when the symbol is real symbol, i.e. it is not inline clone
- or abstract function kept for debug info purposes only. */
-
-static inline bool
-symtab_real_symbol_p (symtab_node *node)
-{
- struct cgraph_node *cnode;
-
- if (DECL_ABSTRACT (node->decl))
- return false;
- if (!is_a <cgraph_node *> (node))
- return true;
- cnode = cgraph (node);
- if (cnode->global.inlined_to)
- return false;
- return true;
-}
-
-/* Return true if NODE can be discarded by linker from the binary. */
-
-static inline bool
-symtab_can_be_discarded (symtab_node *node)
-{
- return (DECL_EXTERNAL (node->decl)
- || (node->get_comdat_group ()
- && node->resolution != LDPR_PREVAILING_DEF
- && node->resolution != LDPR_PREVAILING_DEF_IRONLY
- && node->resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
-}
-
-/* Return true if NODE is local to a particular COMDAT group, and must not
- be named from outside the COMDAT. This is used for C++ decloned
- constructors. */
-
-static inline bool
-symtab_comdat_local_p (symtab_node *node)
+inline void
+cgraph_node::mark_force_output (void)
{
- return (node->same_comdat_group && !TREE_PUBLIC (node->decl));
+ force_output = 1;
+ gcc_checking_assert (!global.inlined_to);
}
-/* Return true if ONE and TWO are part of the same COMDAT group. */
-
-static inline bool
-symtab_in_same_comdat_p (symtab_node *one, symtab_node *two)
-{
- if (cgraph_node *cn = dyn_cast <cgraph_node *> (one))
- {
- if (cn->global.inlined_to)
- one = cn->global.inlined_to;
- }
- if (cgraph_node *cn = dyn_cast <cgraph_node *> (two))
- {
- if (cn->global.inlined_to)
- two = cn->global.inlined_to;
- }
-
- return one->get_comdat_group () == two->get_comdat_group ();
-}
#endif /* GCC_CGRAPH_H */
decl = get_base_var (*tp);
if (TREE_CODE (decl) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
if (!ctx->only_vars)
- cgraph_mark_address_taken_node (node);
+ node->mark_address_taken ();
ctx->varpool_node->add_reference (node, IPA_REF_ADDR);
}
if (DECL_FUNCTION_PERSONALITY (node->decl))
{
tree per_decl = DECL_FUNCTION_PERSONALITY (node->decl);
- struct cgraph_node *per_node = cgraph_get_create_node (per_decl);
+ struct cgraph_node *per_node = cgraph_node::get_create (per_decl);
node->add_reference (per_node, IPA_REF_ADDR);
- cgraph_mark_address_taken_node (per_node);
+ per_node->mark_address_taken ();
}
i = fun->eh->region_tree;
addr = get_base_address (addr);
if (TREE_CODE (addr) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_create_node (addr);
- cgraph_mark_address_taken_node (node);
+ struct cgraph_node *node = cgraph_node::get_create (addr);
+ node->mark_address_taken ();
((symtab_node *)data)->add_reference (node, IPA_REF_ADDR, stmt);
}
else if (addr && TREE_CODE (addr) == VAR_DECL
{
/* ??? This can happen on platforms with descriptors when these are
directly manipulated in the code. Pretend that it's an address. */
- struct cgraph_node *node = cgraph_get_create_node (t);
- cgraph_mark_address_taken_node (node);
+ struct cgraph_node *node = cgraph_node::get_create (t);
+ node->mark_address_taken ();
((symtab_node *)data)->add_reference (node, IPA_REF_ADDR, stmt);
}
else if (t && TREE_CODE (t) == VAR_DECL
return false;
}
-/* Record all references from NODE that are taken in statement STMT. */
+/* Record all references from cgraph_node that are taken in statement STMT. */
+
void
-ipa_record_stmt_references (struct cgraph_node *node, gimple stmt)
+cgraph_node::record_stmt_references (gimple stmt)
{
- walk_stmt_load_store_addr_ops (stmt, node, mark_load, mark_store,
+ walk_stmt_load_store_addr_ops (stmt, this, mark_load, mark_store,
mark_address);
}
pass_build_cgraph_edges::execute (function *fun)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct pointer_set_t *visited_nodes = pointer_set_create ();
gimple_stmt_iterator gsi;
tree decl;
bb);
decl = gimple_call_fndecl (stmt);
if (decl)
- cgraph_create_edge (node, cgraph_get_create_node (decl),
- stmt, bb->count, freq);
+ node->create_edge (cgraph_node::get_create (decl),
+ stmt, bb->count, freq);
else if (gimple_call_internal_p (stmt))
;
else
- cgraph_create_indirect_edge (node, stmt,
- gimple_call_flags (stmt),
- bb->count, freq);
+ node->create_indirect_edge (stmt,
+ gimple_call_flags (stmt),
+ bb->count, freq);
}
- ipa_record_stmt_references (node, stmt);
+ node->record_stmt_references (stmt);
if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
&& gimple_omp_parallel_child_fn (stmt))
{
tree fn = gimple_omp_parallel_child_fn (stmt);
- node->add_reference (cgraph_get_create_node (fn),
+ node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
if (gimple_code (stmt) == GIMPLE_OMP_TASK)
{
tree fn = gimple_omp_task_child_fn (stmt);
if (fn)
- node->add_reference (cgraph_get_create_node (fn),
+ node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
fn = gimple_omp_task_copy_fn (stmt);
if (fn)
- node->add_reference (cgraph_get_create_node (fn),
+ node->add_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
}
/* Look for initializers of constant variables and private statics. */
rebuild_cgraph_edges (void)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
- cgraph_node_remove_callees (node);
+ node->remove_callees ();
node->remove_all_references ();
node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
bb);
decl = gimple_call_fndecl (stmt);
if (decl)
- cgraph_create_edge (node, cgraph_get_create_node (decl), stmt,
- bb->count, freq);
+ node->create_edge (cgraph_node::get_create (decl), stmt,
+ bb->count, freq);
else if (gimple_call_internal_p (stmt))
;
else
- cgraph_create_indirect_edge (node, stmt,
- gimple_call_flags (stmt),
- bb->count, freq);
+ node->create_indirect_edge (stmt,
+ gimple_call_flags (stmt),
+ bb->count, freq);
}
- ipa_record_stmt_references (node, stmt);
+ node->record_stmt_references (stmt);
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
}
record_eh_tables (node, cfun);
gcc_assert (!node->global.inlined_to);
cgraph_rebuild_references (void)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
struct ipa_ref *ref = NULL;
int i;
FOR_EACH_BB_FN (bb, cfun)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- ipa_record_stmt_references (node, gsi_stmt (gsi));
+ node->record_stmt_references (gsi_stmt (gsi));
}
record_eh_tables (node, cfun);
}
unsigned int
pass_remove_cgraph_callee_edges::execute (function *)
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
- cgraph_node_remove_callees (node);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
+ node->remove_callees ();
node->remove_all_references ();
return 0;
}
via cgraph_resolve_speculation and not here. */
&& !e->speculative)
{
- struct cgraph_node *callee = cgraph_get_node (decl);
+ struct cgraph_node *callee = cgraph_node::get (decl);
gcc_checking_assert (callee);
- new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
+ new_edge = n->create_edge (callee, call_stmt, count, freq);
}
else
{
- new_edge = cgraph_create_indirect_edge (n, call_stmt,
- e->indirect_info->ecf_flags,
- count, freq);
+ new_edge = n->create_indirect_edge (call_stmt,
+ e->indirect_info->ecf_flags,
+ count, freq);
*new_edge->indirect_info = *e->indirect_info;
}
}
else
{
- new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
+ new_edge = n->create_edge (e->callee, call_stmt, count, freq);
if (e->indirect_info)
{
new_edge->indirect_info
duplicate_thunk_for_node (cgraph_node *thunk, cgraph_node *node)
{
cgraph_node *new_thunk, *thunk_of;
- thunk_of = cgraph_function_or_thunk_node (thunk->callees->callee);
+ thunk_of = thunk->callees->callee->ultimate_alias_target ();
if (thunk_of->thunk.thunk_p)
node = duplicate_thunk_for_node (thunk_of, node);
DECL_NAME (new_decl) = clone_function_name (thunk->decl, "artificial_thunk");
SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
- new_thunk = cgraph_create_node (new_decl);
+ new_thunk = cgraph_node::create (new_decl);
set_new_clone_decl_and_node_flags (new_thunk);
new_thunk->definition = true;
new_thunk->thunk = thunk->thunk;
new_thunk->clone.args_to_skip = node->clone.args_to_skip;
new_thunk->clone.combined_args_to_skip = node->clone.combined_args_to_skip;
- struct cgraph_edge *e = cgraph_create_edge (new_thunk, node, NULL, 0,
- CGRAPH_FREQ_BASE);
+ struct cgraph_edge *e = new_thunk->create_edge (node, NULL, 0,
+ CGRAPH_FREQ_BASE);
e->call_stmt_cannot_inline_p = true;
cgraph_call_edge_duplication_hooks (thunk->callees, e);
- if (!expand_thunk (new_thunk, false, false))
+ if (!new_thunk->expand_thunk (false, false))
new_thunk->analyzed = true;
- cgraph_call_node_duplication_hooks (thunk, new_thunk);
+ thunk->call_duplication_hooks (new_thunk);
return new_thunk;
}
void
redirect_edge_duplicating_thunks (struct cgraph_edge *e, struct cgraph_node *n)
{
- cgraph_node *orig_to = cgraph_function_or_thunk_node (e->callee);
+ cgraph_node *orig_to = e->callee->ultimate_alias_target ();
if (orig_to->thunk.thunk_p)
n = duplicate_thunk_for_node (orig_to, n);
will see this in node's global.inlined_to, when invoked. Can be NULL if the
node is not inlined. */
-struct cgraph_node *
-cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
- bool update_original,
- vec<cgraph_edge_p> redirect_callers,
- bool call_duplication_hook,
- struct cgraph_node *new_inlined_to,
- bitmap args_to_skip)
+cgraph_node *
+cgraph_node::create_clone (tree decl, gcov_type gcov_count, int freq,
+ bool update_original,
+ vec<cgraph_edge *> redirect_callers,
+ bool call_duplication_hook,
+ struct cgraph_node *new_inlined_to,
+ bitmap args_to_skip)
{
- struct cgraph_node *new_node = cgraph_create_empty_node ();
+ struct cgraph_node *new_node = cgraph_node::create_empty ();
struct cgraph_edge *e;
gcov_type count_scale;
unsigned i;
new_node->decl = decl;
- symtab_register_node (new_node);
- new_node->origin = n->origin;
- new_node->lto_file_data = n->lto_file_data;
+ new_node->register_symbol ();
+ new_node->origin = origin;
+ new_node->lto_file_data = lto_file_data;
if (new_node->origin)
{
new_node->next_nested = new_node->origin->nested;
new_node->origin->nested = new_node;
}
- new_node->analyzed = n->analyzed;
- new_node->definition = n->definition;
- new_node->local = n->local;
+ new_node->analyzed = analyzed;
+ new_node->definition = definition;
+ new_node->local = local;
new_node->externally_visible = false;
new_node->local.local = true;
- new_node->global = n->global;
+ new_node->global = global;
new_node->global.inlined_to = new_inlined_to;
- new_node->rtl = n->rtl;
+ new_node->rtl = rtl;
new_node->count = count;
- new_node->frequency = n->frequency;
- new_node->tp_first_run = n->tp_first_run;
+ new_node->frequency = frequency;
+ new_node->tp_first_run = tp_first_run;
new_node->clone.tree_map = NULL;
new_node->clone.args_to_skip = args_to_skip;
if (!args_to_skip)
- new_node->clone.combined_args_to_skip = n->clone.combined_args_to_skip;
- else if (n->clone.combined_args_to_skip)
+ new_node->clone.combined_args_to_skip = clone.combined_args_to_skip;
+ else if (clone.combined_args_to_skip)
{
new_node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
bitmap_ior (new_node->clone.combined_args_to_skip,
- n->clone.combined_args_to_skip, args_to_skip);
+ clone.combined_args_to_skip, args_to_skip);
}
else
new_node->clone.combined_args_to_skip = args_to_skip;
- if (n->count)
+ if (count)
{
- if (new_node->count > n->count)
+ if (new_node->count > count)
count_scale = REG_BR_PROB_BASE;
else
- count_scale = GCOV_COMPUTE_SCALE (new_node->count, n->count);
+ count_scale = GCOV_COMPUTE_SCALE (new_node->count, count);
}
else
count_scale = 0;
if (update_original)
{
- n->count -= count;
- if (n->count < 0)
- n->count = 0;
+ count -= gcov_count;
+ if (count < 0)
+ count = 0;
}
FOR_EACH_VEC_ELT (redirect_callers, i, e)
redirect_edge_duplicating_thunks (e, new_node);
}
- for (e = n->callees;e; e=e->next_callee)
+ for (e = callees;e; e=e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, update_original);
- for (e = n->indirect_calls; e; e = e->next_callee)
+ for (e = indirect_calls; e; e = e->next_callee)
cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
count_scale, freq, update_original);
- new_node->clone_references (n);
+ new_node->clone_references (this);
- new_node->next_sibling_clone = n->clones;
- if (n->clones)
- n->clones->prev_sibling_clone = new_node;
- n->clones = new_node;
- new_node->clone_of = n;
+ new_node->next_sibling_clone = clones;
+ if (clones)
+ clones->prev_sibling_clone = new_node;
+ clones = new_node;
+ new_node->clone_of = this;
if (call_duplication_hook)
- cgraph_call_node_duplication_hooks (n, new_node);
+ call_duplication_hooks (new_node);
return new_node;
}
bitmap interface.
*/
struct cgraph_node *
-cgraph_create_virtual_clone (struct cgraph_node *old_node,
- vec<cgraph_edge_p> redirect_callers,
- vec<ipa_replace_map_p, va_gc> *tree_map,
- bitmap args_to_skip,
- const char * suffix)
+cgraph_node::create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map,
+ bitmap args_to_skip, const char * suffix)
{
- tree old_decl = old_node->decl;
+ tree old_decl = decl;
struct cgraph_node *new_node = NULL;
tree new_decl;
size_t len, i;
if (!in_lto_p)
gcc_checking_assert (tree_versionable_function_p (old_decl));
- gcc_assert (old_node->local.can_change_signature || !args_to_skip);
+ gcc_assert (local.can_change_signature || !args_to_skip);
/* Make a new FUNCTION_DECL tree node */
if (!args_to_skip)
SET_DECL_ASSEMBLER_NAME (new_decl, clone_function_name (old_decl, suffix));
SET_DECL_RTL (new_decl, NULL);
- new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
- CGRAPH_FREQ_BASE, false,
- redirect_callers, false, NULL, args_to_skip);
+ new_node = create_clone (new_decl, count, CGRAPH_FREQ_BASE, false,
+ redirect_callers, false, NULL, args_to_skip);
+
/* Update the properties.
Make clone visible only within this translation unit. Make sure
that is not weak also.
FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
new_node->maybe_add_reference (map->new_tree, IPA_REF_ADDR, NULL);
- if (old_node->ipa_transforms_to_apply.exists ())
+ if (ipa_transforms_to_apply.exists ())
new_node->ipa_transforms_to_apply
- = old_node->ipa_transforms_to_apply.copy ();
-
- cgraph_call_node_duplication_hooks (old_node, new_node);
+ = ipa_transforms_to_apply.copy ();
+ call_duplication_hooks (new_node);
return new_node;
}
-/* NODE is being removed from symbol table; see if its entry can be replaced by
- other inline clone. */
-struct cgraph_node *
-cgraph_find_replacement_node (struct cgraph_node *node)
+/* The callgraph node is being removed from the symbol table; see if its entry
+ can be replaced by another inline clone. */
+cgraph_node *
+cgraph_node::find_replacement (void)
{
struct cgraph_node *next_inline_clone, *replacement;
- for (next_inline_clone = node->clones;
+ for (next_inline_clone = clones;
next_inline_clone
- && next_inline_clone->decl != node->decl;
+ && next_inline_clone->decl != decl;
next_inline_clone = next_inline_clone->next_sibling_clone)
;
= next_inline_clone->prev_sibling_clone;
if (next_inline_clone->prev_sibling_clone)
{
- gcc_assert (node->clones != next_inline_clone);
+ gcc_assert (clones != next_inline_clone);
next_inline_clone->prev_sibling_clone->next_sibling_clone
= next_inline_clone->next_sibling_clone;
}
else
{
- gcc_assert (node->clones == next_inline_clone);
- node->clones = next_inline_clone->next_sibling_clone;
+ gcc_assert (clones == next_inline_clone);
+ clones = next_inline_clone->next_sibling_clone;
}
- new_clones = node->clones;
- node->clones = NULL;
+ new_clones = clones;
+ clones = NULL;
/* Copy clone info. */
- next_inline_clone->clone = node->clone;
+ next_inline_clone->clone = clone;
/* Now place it into clone tree at same level at NODE. */
- next_inline_clone->clone_of = node->clone_of;
+ next_inline_clone->clone_of = clone_of;
next_inline_clone->prev_sibling_clone = NULL;
next_inline_clone->next_sibling_clone = NULL;
- if (node->clone_of)
+ if (clone_of)
{
- if (node->clone_of->clones)
- node->clone_of->clones->prev_sibling_clone = next_inline_clone;
- next_inline_clone->next_sibling_clone = node->clone_of->clones;
- node->clone_of->clones = next_inline_clone;
+ if (clone_of->clones)
+ clone_of->clones->prev_sibling_clone = next_inline_clone;
+ next_inline_clone->next_sibling_clone = clone_of->clones;
+ clone_of->clones = next_inline_clone;
}
/* Merge the clone list. */
{
n = next_inline_clone->clones;
while (n->next_sibling_clone)
- n = n->next_sibling_clone;
+ n = n->next_sibling_clone;
n->next_sibling_clone = new_clones;
new_clones->prev_sibling_clone = n;
}
call. */
void
-cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
- gimple old_stmt, gimple new_stmt,
- bool update_speculative)
+cgraph_node::set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+ bool update_speculative)
{
struct cgraph_node *node;
- struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
+ struct cgraph_edge *edge = get_edge (old_stmt);
if (edge)
cgraph_set_call_stmt (edge, new_stmt, update_speculative);
- node = orig->clones;
+ node = clones;
if (node)
- while (node != orig)
+ while (node != this)
{
- struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+ struct cgraph_edge *edge = node->get_edge (old_stmt);
if (edge)
{
cgraph_set_call_stmt (edge, new_stmt, update_speculative);
node = node->next_sibling_clone;
else
{
- while (node != orig && !node->next_sibling_clone)
+ while (node != this && !node->next_sibling_clone)
node = node->clone_of;
- if (node != orig)
+ if (node != this)
node = node->next_sibling_clone;
}
}
frequencies of the clones. */
void
-cgraph_create_edge_including_clones (struct cgraph_node *orig,
- struct cgraph_node *callee,
- gimple old_stmt,
- gimple stmt, gcov_type count,
- int freq,
- cgraph_inline_failed_t reason)
+cgraph_node::create_edge_including_clones (struct cgraph_node *callee,
+ gimple old_stmt, gimple stmt,
+ gcov_type count,
+ int freq,
+ cgraph_inline_failed_t reason)
{
struct cgraph_node *node;
struct cgraph_edge *edge;
- if (!cgraph_edge (orig, stmt))
+ if (!get_edge (stmt))
{
- edge = cgraph_create_edge (orig, callee, stmt, count, freq);
+ edge = create_edge (callee, stmt, count, freq);
edge->inline_failed = reason;
}
- node = orig->clones;
+ node = clones;
if (node)
- while (node != orig)
+ while (node != this)
{
- struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
+ struct cgraph_edge *edge = node->get_edge (old_stmt);
/* It is possible that clones already contain the edge while
master didn't. Either we promoted indirect call into direct
master where edges has been removed. */
if (edge)
cgraph_set_call_stmt (edge, stmt);
- else if (!cgraph_edge (node, stmt))
+ else if (! node->get_edge (stmt))
{
- edge = cgraph_create_edge (node, callee, stmt, count,
- freq);
+ edge = node->create_edge (callee, stmt, count, freq);
edge->inline_failed = reason;
}
node = node->next_sibling_clone;
else
{
- while (node != orig && !node->next_sibling_clone)
+ while (node != this && !node->next_sibling_clone)
node = node->clone_of;
- if (node != orig)
+ if (node != this)
node = node->next_sibling_clone;
}
}
tree. */
bool
-cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node)
+cgraph_node::remove_symbol_and_inline_clones (cgraph_node *forbidden_node)
{
struct cgraph_edge *e, *next;
bool found = false;
- if (node == forbidden_node)
+ if (this == forbidden_node)
{
- cgraph_remove_edge (node->callers);
+ cgraph_remove_edge (callers);
return true;
}
- for (e = node->callees; e; e = next)
+ for (e = callees; e; e = next)
{
next = e->next_callee;
if (!e->inline_failed)
- found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node);
+ found |= e->callee->remove_symbol_and_inline_clones (forbidden_node);
}
- cgraph_remove_node (node);
+ remove ();
return found;
}
/* Create a new cgraph node which is the new version of
- OLD_VERSION node. REDIRECT_CALLERS holds the callers
+ callgraph node. REDIRECT_CALLERS holds the callers
edges which should be redirected to point to
- NEW_VERSION. ALL the callees edges of OLD_VERSION
+ NEW_VERSION. ALL the callees edges of the node
are cloned to the new version node. Return the new
version node.
was copied to prevent duplications of calls that are dead
in the clone. */
-struct cgraph_node *
-cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
- tree new_decl,
- vec<cgraph_edge_p> redirect_callers,
- bitmap bbs_to_copy)
+cgraph_node *
+cgraph_node::create_version_clone (tree new_decl,
+ vec<cgraph_edge *> redirect_callers,
+ bitmap bbs_to_copy)
{
struct cgraph_node *new_version;
struct cgraph_edge *e;
unsigned i;
- gcc_assert (old_version);
-
- new_version = cgraph_create_node (new_decl);
+ new_version = cgraph_node::create (new_decl);
- new_version->analyzed = old_version->analyzed;
- new_version->definition = old_version->definition;
- new_version->local = old_version->local;
+ new_version->analyzed = analyzed;
+ new_version->definition = definition;
+ new_version->local = local;
new_version->externally_visible = false;
new_version->local.local = new_version->definition;
- new_version->global = old_version->global;
- new_version->rtl = old_version->rtl;
- new_version->count = old_version->count;
+ new_version->global = global;
+ new_version->rtl = rtl;
+ new_version->count = count;
- for (e = old_version->callees; e; e=e->next_callee)
+ for (e = callees; e; e=e->next_callee)
if (!bbs_to_copy
|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
cgraph_clone_edge (e, new_version, e->call_stmt,
e->lto_stmt_uid, REG_BR_PROB_BASE,
CGRAPH_FREQ_BASE,
true);
- for (e = old_version->indirect_calls; e; e=e->next_callee)
+ for (e = indirect_calls; e; e=e->next_callee)
if (!bbs_to_copy
|| bitmap_bit_p (bbs_to_copy, gimple_bb (e->call_stmt)->index))
cgraph_clone_edge (e, new_version, e->call_stmt,
cgraph_redirect_edge_callee (e, new_version);
}
- cgraph_call_node_duplication_hooks (old_version, new_version);
+ call_duplication_hooks (new_version);
return new_version;
}
TREE_MAP is a mapping of tree nodes we want to replace with
new ones (according to results of prior analysis).
- OLD_VERSION_NODE is the node that is versioned.
If non-NULL ARGS_TO_SKIP determine function parameters to remove
from new version.
Return the new version's cgraph node. */
-struct cgraph_node *
-cgraph_function_versioning (struct cgraph_node *old_version_node,
- vec<cgraph_edge_p> redirect_callers,
- vec<ipa_replace_map_p, va_gc> *tree_map,
- bitmap args_to_skip,
- bool skip_return,
- bitmap bbs_to_copy,
- basic_block new_entry_block,
- const char *clone_name)
+cgraph_node *
+cgraph_node::create_version_clone_with_body
+ (vec<cgraph_edge *> redirect_callers,
+ vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
+ bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
+ const char *clone_name)
{
- tree old_decl = old_version_node->decl;
+ tree old_decl = decl;
struct cgraph_node *new_version_node = NULL;
tree new_decl;
if (!tree_versionable_function_p (old_decl))
return NULL;
- gcc_assert (old_version_node->local.can_change_signature || !args_to_skip);
+ gcc_assert (local.can_change_signature || !args_to_skip);
/* Make a new FUNCTION_DECL tree node for the new version. */
if (!args_to_skip && !skip_return)
/* Create the new version's call-graph node.
and update the edges of the new node. */
- new_version_node =
- cgraph_copy_node_for_versioning (old_version_node, new_decl,
- redirect_callers, bbs_to_copy);
+ new_version_node = create_version_clone (new_decl, redirect_callers,
+ bbs_to_copy);
- if (old_version_node->ipa_transforms_to_apply.exists ())
+ if (ipa_transforms_to_apply.exists ())
new_version_node->ipa_transforms_to_apply
- = old_version_node->ipa_transforms_to_apply.copy ();
+ = ipa_transforms_to_apply.copy ();
/* Copy the OLD_VERSION_NODE function tree to the new version. */
tree_function_versioning (old_decl, new_decl, tree_map, false, args_to_skip,
skip_return, bbs_to_copy, new_entry_block);
that is not weak also.
??? We cannot use COMDAT linkage because there is no
ABI support for this. */
- symtab_make_decl_local (new_version_node->decl);
+ new_version_node->make_decl_local ();
DECL_VIRTUAL_P (new_version_node->decl) = 0;
new_version_node->externally_visible = 0;
new_version_node->local.local = 1;
/* Update the call_expr on the edges to call the new version node. */
update_call_expr (new_version_node);
- cgraph_call_function_insertion_hooks (new_version_node);
+ new_version_node->call_function_insertion_hooks ();
return new_version_node;
}
node->prev_sibling_clone = NULL;
if (!node->clone_of->analyzed && !node->clone_of->clones)
{
- cgraph_release_function_body (node->clone_of);
- cgraph_node_remove_callees (node->clone_of);
+ node->clone_of->release_body ();
+ node->clone_of->remove_callees ();
node->clone_of->remove_all_references ();
}
node->clone_of = NULL;
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Materializing clones\n");
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
/* We can also do topological order, but number of iterations should be
&& !gimple_has_body_p (node->decl))
{
if (!node->clone_of->clone_of)
- cgraph_get_body (node->clone_of);
+ node->clone_of->get_body ();
if (gimple_has_body_p (node->clone_of->decl))
{
if (cgraph_dump_file)
FOR_EACH_FUNCTION (node)
if (!node->analyzed && node->callees)
{
- cgraph_node_remove_callees (node);
+ node->remove_callees ();
node->remove_all_references ();
}
else
if (cgraph_dump_file)
fprintf (cgraph_dump_file, "Materialization Call site updates done.\n");
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
symtab_remove_unreachable_nodes (false, cgraph_dump_file);
}
static void expand_all_functions (void);
static void mark_functions_to_output (void);
static void expand_function (struct cgraph_node *);
-static void analyze_function (struct cgraph_node *);
static void handle_alias_pairs (void);
FILE *cgraph_dump_file;
it into reachable functions list. */
cgraph_finalize_function (fndecl, false);
- cgraph_call_function_insertion_hooks (node);
+ node->call_function_insertion_hooks ();
enqueue_node (node);
break;
gimple_register_cfg_hooks ();
if (!node->analyzed)
- analyze_function (node);
+ node->analyze ();
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
if (cgraph_state == CGRAPH_STATE_IPA_SSA
&& !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
free_dominance_info (CDI_POST_DOMINATORS);
free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
- cgraph_call_function_insertion_hooks (node);
+ node->call_function_insertion_hooks ();
break;
case CGRAPH_STATE_EXPANSION:
/* Functions created during expansion shall be compiled
directly. */
node->process = 0;
- cgraph_call_function_insertion_hooks (node);
+ node->call_function_insertion_hooks ();
expand_function (node);
break;
body for expanding the function but this is difficult to do. */
void
-cgraph_reset_node (struct cgraph_node *node)
+cgraph_node::reset (void)
{
- /* If node->process is set, then we have already begun whole-unit analysis.
+ /* If process is set, then we have already begun whole-unit analysis.
This is *not* testing for whether we've already emitted the function.
That case can be sort-of legitimately seen with real function redefinition
errors. I would argue that the front end should never present us with
such a case, but don't enforce that for now. */
- gcc_assert (!node->process);
+ gcc_assert (!process);
/* Reset our data structures so we can analyze the function again. */
- memset (&node->local, 0, sizeof (node->local));
- memset (&node->global, 0, sizeof (node->global));
- memset (&node->rtl, 0, sizeof (node->rtl));
- node->analyzed = false;
- node->definition = false;
- node->alias = false;
- node->weakref = false;
- node->cpp_implicit_alias = false;
-
- cgraph_node_remove_callees (node);
- node->remove_all_references ();
+ memset (&local, 0, sizeof (local));
+ memset (&global, 0, sizeof (global));
+ memset (&rtl, 0, sizeof (rtl));
+ analyzed = false;
+ definition = false;
+ alias = false;
+ weakref = false;
+ cpp_implicit_alias = false;
+
+ remove_callees ();
+ remove_all_references ();
}
/* Return true when there are references to NODE. */
void
cgraph_finalize_function (tree decl, bool no_collect)
{
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
if (node->definition)
{
/* Nested functions should only be defined once. */
gcc_assert (!DECL_CONTEXT (decl)
|| TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
- cgraph_reset_node (node);
+ node->reset ();
node->local.redefined_extern_inline = true;
}
processing to avoid need the passes to be re-entrant. */
void
-cgraph_add_new_function (tree fndecl, bool lowered)
+cgraph_node::add_new_function (tree fndecl, bool lowered)
{
gcc::pass_manager *passes = g->get_passes ();
struct cgraph_node *node;
break;
case CGRAPH_STATE_CONSTRUCTION:
/* Just enqueue function to be processed at nearest occurrence. */
- node = cgraph_get_create_node (fndecl);
+ node = cgraph_node::get_create (fndecl);
if (lowered)
node->lowered = true;
if (!cgraph_new_nodes)
case CGRAPH_STATE_EXPANSION:
/* Bring the function into finalized state and enqueue for later
analyzing and compilation. */
- node = cgraph_get_create_node (fndecl);
+ node = cgraph_node::get_create (fndecl);
node->local.local = false;
node->definition = true;
node->force_output = true;
case CGRAPH_STATE_FINISHED:
/* At the very end of compilation we have to do all the work up
to expansion. */
- node = cgraph_create_node (fndecl);
+ node = cgraph_node::create (fndecl);
if (lowered)
node->lowered = true;
node->definition = true;
- analyze_function (node);
+ node->analyze ();
push_cfun (DECL_STRUCT_FUNCTION (fndecl));
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
}
/* Analyze the function scheduled to be output. */
-static void
-analyze_function (struct cgraph_node *node)
+void
+cgraph_node::analyze (void)
{
- tree decl = node->decl;
+ tree decl = this->decl;
location_t saved_loc = input_location;
input_location = DECL_SOURCE_LOCATION (decl);
- if (node->thunk.thunk_p)
+ if (thunk.thunk_p)
{
- cgraph_create_edge (node, cgraph_get_node (node->thunk.alias),
- NULL, 0, CGRAPH_FREQ_BASE);
- if (!expand_thunk (node, false, false))
+ create_edge (cgraph_node::get (thunk.alias),
+ NULL, 0, CGRAPH_FREQ_BASE);
+ if (!expand_thunk (false, false))
{
- node->thunk.alias = NULL;
- node->analyzed = true;
+ thunk.alias = NULL;
+ analyzed = true;
return;
}
- node->thunk.alias = NULL;
+ thunk.alias = NULL;
}
- if (node->alias)
- symtab_resolve_alias
- (node, cgraph_get_node (node->alias_target));
- else if (node->dispatcher_function)
+ if (alias)
+ resolve_alias (cgraph_node::get (alias_target));
+ else if (dispatcher_function)
{
/* Generate the dispatcher body of multi-versioned functions. */
struct cgraph_function_version_info *dispatcher_version_info
- = get_cgraph_node_version (node);
+ = function_version ();
if (dispatcher_version_info != NULL
&& (dispatcher_version_info->dispatcher_resolver
== NULL_TREE))
{
tree resolver = NULL_TREE;
gcc_assert (targetm.generate_version_dispatcher_body);
- resolver = targetm.generate_version_dispatcher_body (node);
+ resolver = targetm.generate_version_dispatcher_body (this);
gcc_assert (resolver != NULL_TREE);
}
}
{
push_cfun (DECL_STRUCT_FUNCTION (decl));
- assign_assembler_name_if_neeeded (node->decl);
+ assign_assembler_name_if_neeeded (decl);
/* Make sure to gimplify bodies only once. During analyzing a
function we lower it, which will require gimplified nested
dump_function (TDI_generic, decl);
/* Lower the function. */
- if (!node->lowered)
+ if (!lowered)
{
- if (node->nested)
- lower_nested_functions (node->decl);
- gcc_assert (!node->nested);
+ if (nested)
+ lower_nested_functions (decl);
+ gcc_assert (!nested);
gimple_register_cfg_hooks ();
bitmap_obstack_initialize (NULL);
free_dominance_info (CDI_DOMINATORS);
compact_blocks ();
bitmap_obstack_release (NULL);
- node->lowered = true;
+ lowered = true;
}
pop_cfun ();
}
- node->analyzed = true;
+ analyzed = true;
input_location = saved_loc;
}
symtab_node *node;
FOR_EACH_SYMBOL (node)
if (node->cpp_implicit_alias && !node->analyzed)
- symtab_resolve_alias
- (node,
- TREE_CODE (node->alias_target) == VAR_DECL
+ node->resolve_alias
+ (TREE_CODE (node->alias_target) == VAR_DECL
? (symtab_node *)varpool_node_for_decl (node->alias_target)
- : (symtab_node *)cgraph_get_create_node (node->alias_target));
+ : (symtab_node *)cgraph_node::get_create (node->alias_target));
cpp_implicit_aliases_done = true;
}
{
tree decl = node->decl;
if (DECL_PRESERVE_P (decl))
- cgraph_mark_force_output_node (node);
+ node->mark_force_output ();
else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
{
if (! TREE_PUBLIC (node->decl))
if (targets.length () == 1)
target = targets[0];
else
- target = cgraph_get_create_node
- (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
+ target = cgraph_node::get_create
+ (builtin_decl_implicit (BUILT_IN_UNREACHABLE));
if (cgraph_dump_file)
{
if (cpp_implicit_aliases_done)
FOR_EACH_SYMBOL (node)
if (node->cpp_implicit_alias)
- fixup_same_cpp_alias_visibility (node, symtab_alias_target (node));
+ node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
if (optimize && flag_devirtualize)
build_type_inheritance_graph ();
&& !cnode->thunk.thunk_p
&& !cnode->dispatcher_function)
{
- cgraph_reset_node (cnode);
+ cnode->reset ();
cnode->local.redefined_extern_inline = true;
continue;
}
if (!cnode->analyzed)
- analyze_function (cnode);
+ cnode->analyze ();
for (edge = cnode->callees; edge; edge = edge->next_callee)
if (edge->callee->definition)
if (DECL_ABSTRACT_ORIGIN (decl))
{
struct cgraph_node *origin_node
- = cgraph_get_node (DECL_ABSTRACT_ORIGIN (decl));
+ = cgraph_node::get (DECL_ABSTRACT_ORIGIN (decl));
origin_node->used_as_abstract_origin = true;
}
}
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\n\nInitial ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
if (cgraph_dump_file)
{
if (cgraph_dump_file)
fprintf (cgraph_dump_file, " %s", node->name ());
- symtab_remove_node (node);
+ node->remove ();
continue;
}
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
if (cnode->definition && !gimple_has_body_p (decl)
&& !cnode->alias
&& !cnode->thunk.thunk_p)
- cgraph_reset_node (cnode);
+ cnode->reset ();
gcc_assert (!cnode->definition || cnode->thunk.thunk_p
|| cnode->alias
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\n\nReclaimed ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
bitmap_obstack_release (NULL);
pointer_set_destroy (reachable_call_targets);
if (!target_node
&& lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
{
- symtab_node *node = symtab_get_node (p->decl);
+ symtab_node *node = symtab_node::get (p->decl);
if (node)
{
node->alias_target = p->target;
else if (!target_node)
{
error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
- symtab_node *node = symtab_get_node (p->decl);
+ symtab_node *node = symtab_node::get (p->decl);
if (node)
node->alias = false;
alias_pairs->unordered_remove (i);
if (TREE_CODE (p->decl) == FUNCTION_DECL
&& target_node && is_a <cgraph_node *> (target_node))
{
- struct cgraph_node *src_node = cgraph_get_node (p->decl);
+ struct cgraph_node *src_node = cgraph_node::get (p->decl);
if (src_node && src_node->definition)
- cgraph_reset_node (src_node);
- cgraph_create_function_alias (p->decl, target_node->decl);
+ src_node->reset ();
+ cgraph_node::create_alias (p->decl, target_node->decl);
alias_pairs->unordered_remove (i);
}
else if (TREE_CODE (p->decl) == VAR_DECL
if (node->same_comdat_group)
{
struct cgraph_node *next;
- for (next = cgraph (node->same_comdat_group);
+ for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node;
- next = cgraph (next->same_comdat_group))
+ next = dyn_cast<cgraph_node *> (next->same_comdat_group))
if (!next->thunk.thunk_p && !next->alias
- && !symtab_comdat_local_p (next))
+ && !next->comdat_local_p ())
next->process = 1;
}
}
&& !node->clones
&& !DECL_EXTERNAL (decl))
{
- dump_cgraph_node (stderr, node);
+ node->debug ();
internal_error ("failed to reclaim unneeded function");
}
#endif
&& !node->clones
&& !DECL_EXTERNAL (decl))
{
- dump_cgraph_node (stderr, node);
+ node->debug ();
internal_error ("failed to reclaim unneeded function in same "
"comdat group");
}
thunks that are not lowered. */
bool
-expand_thunk (struct cgraph_node *node, bool output_asm_thunks, bool force_gimple_thunk)
+cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
{
- bool this_adjusting = node->thunk.this_adjusting;
- HOST_WIDE_INT fixed_offset = node->thunk.fixed_offset;
- HOST_WIDE_INT virtual_value = node->thunk.virtual_value;
+ bool this_adjusting = thunk.this_adjusting;
+ HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
+ HOST_WIDE_INT virtual_value = thunk.virtual_value;
tree virtual_offset = NULL;
- tree alias = node->callees->callee->decl;
- tree thunk_fndecl = node->decl;
+ tree alias = callees->callee->decl;
+ tree thunk_fndecl = decl;
tree a;
return false;
if (in_lto_p)
- cgraph_get_body (node);
+ get_body ();
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
free_after_compilation (cfun);
set_cfun (NULL);
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
- node->thunk.thunk_p = false;
- node->analyzed = false;
+ thunk.thunk_p = false;
+ analyzed = false;
}
else
{
gimple ret;
if (in_lto_p)
- cgraph_get_body (node);
+ get_body ();
a = DECL_ARGUMENTS (thunk_fndecl);
current_function_decl = thunk_fndecl;
DECL_IGNORED_P (thunk_fndecl) = 1;
bitmap_obstack_initialize (NULL);
- if (node->thunk.virtual_offset_p)
+ if (thunk.virtual_offset_p)
virtual_offset = size_int (virtual_value);
/* Build the return declaration for the function. */
vargs.quick_push (tmp);
}
call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
- node->callees->call_stmt = call;
+ callees->call_stmt = call;
gimple_call_set_from_thunk (call, true);
if (restmp)
{
/* Since we want to emit the thunk, we explicitly mark its name as
referenced. */
- node->thunk.thunk_p = false;
- node->lowered = true;
+ thunk.thunk_p = false;
+ lowered = true;
bitmap_obstack_release (NULL);
}
current_function_decl = NULL;
struct cgraph_node *thunk = e->caller;
e = e->next_caller;
- expand_thunk (thunk, true, false);
+ thunk->expand_thunk (true, false);
assemble_thunks_and_aliases (thunk);
}
else
announce_function (decl);
node->process = 0;
gcc_assert (node->lowered);
- cgraph_get_body (node);
+ node->get_body ();
/* Generate RTL for the body of DECL. */
gimple_set_body (decl, NULL);
if (DECL_STRUCT_FUNCTION (decl) == 0
- && !cgraph_get_node (decl)->origin)
+ && !cgraph_node::get (decl)->origin)
{
/* Stop pointing to the local nodes about to be freed.
But DECL_INITIAL must remain nonzero so we know this
FIXME: Perhaps thunks should be move before function IFF they are not in comdat
groups. */
assemble_thunks_and_aliases (node);
- cgraph_release_function_body (node);
+ node->release_body ();
/* Eliminate all call edges. This is important so the GIMPLE_CALL no longer
points to the dead function body. */
- cgraph_node_remove_callees (node);
+ node->remove_callees ();
node->remove_all_references ();
}
? DECL_ASSEMBLER_NAME (node->alias_target)
: node->alias_target);
else if (node->analyzed)
- target = DECL_ASSEMBLER_NAME (symtab_alias_target (node)->decl);
+ target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
else
{
gcc_unreachable ();
return;
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
timevar_push (TV_CGRAPHOPT);
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Optimized ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
if (post_ipa_mem_report)
{
if (!quiet_flag)
fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
cgraph_materialize_all_clones ();
execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
symtab_remove_unreachable_nodes (true, dump_file);
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
bitmap_obstack_release (NULL);
mark_functions_to_output ();
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "\nFinal ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
/* Double check that all inline clones are gone and that all
function bodies have been released from memory. */
if (!seen_error ())
|| gimple_has_body_p (node->decl))
{
error_found = true;
- dump_cgraph_node (stderr, node);
+ node->debug ();
}
if (error_found)
internal_error ("nodes with unreleased memory found");
timevar_pop (TV_CGRAPH);
}
-/* Creates a wrapper from SOURCE node to TARGET node. Thunk is used for this
+/* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
kind of wrapper method. */
void
-cgraph_make_wrapper (struct cgraph_node *source, struct cgraph_node *target)
+cgraph_node::create_wrapper (struct cgraph_node *target)
{
/* Preserve DECL_RESULT so we get right by reference flag. */
- tree decl_result = DECL_RESULT (source->decl);
+ tree decl_result = DECL_RESULT (decl);
/* Remove the function's body. */
- cgraph_release_function_body (source);
- cgraph_reset_node (source);
+ release_body ();
+ reset ();
- DECL_RESULT (source->decl) = decl_result;
- DECL_INITIAL (source->decl) = NULL;
- allocate_struct_function (source->decl, false);
+ DECL_RESULT (decl) = decl_result;
+ DECL_INITIAL (decl) = NULL;
+ allocate_struct_function (decl, false);
set_cfun (NULL);
/* Turn alias into thunk and expand it into GIMPLE representation. */
- source->definition = true;
- source->thunk.thunk_p = true;
- source->thunk.this_adjusting = false;
+ definition = true;
+ thunk.thunk_p = true;
+ thunk.this_adjusting = false;
- struct cgraph_edge *e = cgraph_create_edge (source, target, NULL, 0,
- CGRAPH_FREQ_BASE);
+ struct cgraph_edge *e = create_edge (target, NULL, 0, CGRAPH_FREQ_BASE);
- if (!expand_thunk (source, false, true))
- source->analyzed = true;
+ if (!expand_thunk (false, true))
+ analyzed = true;
e->call_stmt_cannot_inline_p = true;
/* Inline summary set-up. */
-
- analyze_function (source);
- inline_analyze_function (source);
+ analyze ();
+ inline_analyze_function (this);
}
#include "gt-cgraphunit.h"
#endif
if (USE_HIDDEN_LINKONCE)
{
- cgraph_create_node (decl)->set_comdat_group (DECL_ASSEMBLER_NAME (decl));
+ cgraph_node::create (decl)->set_comdat_group (DECL_ASSEMBLER_NAME (decl));
targetm.asm_out.unique_section (decl, 0);
switch_to_section (get_named_section (decl, NULL, 0));
&& cfun->machine->use_fast_prologue_epilogue_nregs != frame->nregs)
{
int count = frame->nregs;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
cfun->machine->use_fast_prologue_epilogue_nregs = count;
gcc_assert (fn != NULL && DECL_FUNCTION_VERSIONED (fn));
- node = cgraph_get_node (fn);
+ node = cgraph_node::get (fn);
gcc_assert (node != NULL);
- node_v = get_cgraph_node_version (node);
+ node_v = node->function_version ();
gcc_assert (node_v != NULL);
if (node_v->dispatcher_resolver != NULL)
/* Right now, the dispatching is done via ifunc. */
dispatch_decl = make_dispatcher_decl (default_node->decl);
- dispatcher_node = cgraph_get_create_node (dispatch_decl);
+ dispatcher_node = cgraph_node::get_create (dispatch_decl);
gcc_assert (dispatcher_node != NULL);
dispatcher_node->dispatcher_function = 1;
dispatcher_version_info
- = insert_new_cgraph_node_version (dispatcher_node);
+ = dispatcher_node->insert_new_function_version ();
dispatcher_version_info->next = default_version_info;
dispatcher_node->definition = 1;
push_cfun (DECL_STRUCT_FUNCTION (decl));
*empty_bb = init_lowered_empty_function (decl, false);
- cgraph_add_new_function (decl, true);
- cgraph_call_function_insertion_hooks (cgraph_get_create_node (decl));
+ cgraph_node::add_new_function (decl, true);
+ cgraph_node::get_create (decl)->call_function_insertion_hooks ();
pop_cfun ();
/* Create the alias for dispatch to resolver here. */
/*cgraph_create_function_alias (dispatch_decl, decl);*/
- cgraph_same_body_alias (NULL, dispatch_decl, decl);
+ cgraph_node::create_same_body_alias (dispatch_decl, decl);
XDELETEVEC (resolver_name);
return decl;
}
node = (cgraph_node *)node_p;
- node_version_info = get_cgraph_node_version (node);
+ node_version_info = node->function_version ();
gcc_assert (node->dispatcher_function
&& node_version_info != NULL);
gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
- node = cgraph_get_node (fn);
+ node = cgraph_node::get (fn);
if (node == NULL)
return;
gcc_assert (node->dispatcher_function);
- node_v = get_cgraph_node_version (node);
+ node_v = node->function_version ();
if (node_v == NULL)
return;
if (DECL_ASSEMBLER_NAME_SET_P (method))
mangle_decl (method);
}
- record_function_versions (fn, method);
+ cgraph_node::record_function_versions (fn, method);
continue;
}
if (DECL_INHERITED_CTOR_BASE (method))
if (DECL_ASSEMBLER_NAME_SET_P (olddecl))
mangle_decl (olddecl);
}
- record_function_versions (olddecl, newdecl);
+ cgraph_node::record_function_versions (olddecl, newdecl);
return 0;
}
}
{
struct symtab_node *symbol;
if (TREE_CODE (olddecl) == FUNCTION_DECL)
- symbol = cgraph_get_create_node (newdecl);
+ symbol = cgraph_node::get_create (newdecl);
else
symbol = varpool_node_for_decl (newdecl);
- symbol->set_comdat_group (symtab_get_node (olddecl)->get_comdat_group ());
+ symbol->set_comdat_group (symtab_node::get
+ (olddecl)->get_comdat_group ());
}
DECL_DEFER_OUTPUT (newdecl) |= DECL_DEFER_OUTPUT (olddecl);
DECL_FUNCTION_VERSIONED (newdecl) = 1;
/* newdecl will be purged after copying to olddecl and is no longer
a version. */
- delete_function_version (newdecl);
+ cgraph_node::delete_function_version (newdecl);
}
if (TREE_CODE (newdecl) == FUNCTION_DECL)
{
int function_size;
- struct symtab_node *snode = symtab_get_node (olddecl);
+ struct symtab_node *snode = symtab_node::get (olddecl);
function_size = sizeof (struct tree_decl_common);
if (TREE_CODE (olddecl) == VAR_DECL
&& (TREE_STATIC (olddecl) || TREE_PUBLIC (olddecl) || DECL_EXTERNAL (olddecl)))
- snode = symtab_get_node (olddecl);
+ snode = symtab_node::get (olddecl);
memcpy ((char *) olddecl + sizeof (struct tree_decl_common),
(char *) newdecl + sizeof (struct tree_decl_common),
size - sizeof (struct tree_decl_common)
if (TREE_CODE (newdecl) == FUNCTION_DECL
|| TREE_CODE (newdecl) == VAR_DECL)
{
- struct symtab_node *snode = symtab_get_node (newdecl);
+ struct symtab_node *snode = symtab_node::get (newdecl);
if (snode)
- symtab_remove_node (snode);
+ snode->remove ();
}
ggc_free (newdecl);
If we know a method will be emitted in other TU and no new
functions can be marked reachable, just use the external
definition. */
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
node->forced_by_abi = true;
}
else if (TREE_CODE (decl) == VAR_DECL)
{
current = varpool_node_for_decl (vtbl);
if (last)
- symtab_add_to_same_comdat_group (current, last);
+ current->add_to_same_comdat_group (last);
last = current;
}
}
if (TREE_CODE (decl) == FUNCTION_DECL
|| TREE_CODE (decl) == VAR_DECL)
{
- struct symtab_node *snode = symtab_get_node (decl);
+ struct symtab_node *snode = symtab_node::get (decl);
if (snode)
snode->set_comdat_group (NULL);
if (single_init_fn == NULL_TREE)
continue;
cgraph_node *alias
- = cgraph_same_body_alias (cgraph_get_create_node (fn),
- single_init_fn, fn);
+ = cgraph_node::get_create (fn)->create_same_body_alias
+ (single_init_fn, fn);
gcc_assert (alias != NULL);
}
#endif
{
struct cgraph_node *node, *next;
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (node->cpp_implicit_alias)
- node = cgraph_alias_target (node);
+ node = node->get_alias_target ();
- cgraph_for_node_and_aliases (node, clear_decl_external,
- NULL, true);
+ node->call_for_symbol_thunks_and_aliases (clear_decl_external,
+ NULL, true);
/* If we mark !DECL_EXTERNAL one of the symbols in some comdat
group, we need to mark all symbols in the same comdat group
that way. */
if (node->same_comdat_group)
- for (next = cgraph (node->same_comdat_group);
+ for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
next != node;
- next = cgraph (next->same_comdat_group))
- cgraph_for_node_and_aliases (next, clear_decl_external,
- NULL, true);
+ next = dyn_cast<cgraph_node *> (next->same_comdat_group))
+ next->call_for_symbol_thunks_and_aliases (clear_decl_external,
+ NULL, true);
}
/* If we're going to need to write this function out, and
if (!DECL_EXTERNAL (decl)
&& decl_needed_p (decl)
&& !TREE_ASM_WRITTEN (decl)
- && !cgraph_get_node (decl)->definition)
+ && !cgraph_node::get (decl)->definition)
{
/* We will output the function; no longer consider it in this
loop. */
if (DECL_ONE_ONLY (statfn))
{
/* Put the thunk in the same comdat group as the call op. */
- symtab_add_to_same_comdat_group
- (cgraph_get_create_node (statfn),
- cgraph_get_create_node (callop));
+ cgraph_node::get_create (statfn)->add_to_same_comdat_group
+ (cgraph_node::get_create (callop));
}
tree body = begin_function_body ();
tree compound_stmt = begin_compound_stmt (0);
if (TREE_CODE (decl) == FUNCTION_DECL)
{
/* Don't create an alias to an unreferenced function. */
- if (struct cgraph_node *n = cgraph_get_node (decl))
- cgraph_same_body_alias (n, alias, decl);
+ if (struct cgraph_node *n = cgraph_node::get (decl))
+ n->create_same_body_alias (alias, decl);
}
else
varpool_extra_name_alias (alias, decl);
if (!flag_syntax_only)
{
struct cgraph_node *funcn, *aliasn;
- funcn = cgraph_get_node (function);
+ funcn = cgraph_node::get (function);
gcc_checking_assert (funcn);
- aliasn = cgraph_same_body_alias (funcn, alias, function);
+ aliasn = cgraph_node::create_same_body_alias (alias, function);
DECL_ASSEMBLER_NAME (function);
gcc_assert (aliasn != NULL);
}
tree fn = function;
struct symtab_node *symbol;
- if ((symbol = symtab_get_node (function))
+ if ((symbol = symtab_node::get (function))
&& symbol->alias)
{
if (symbol->analyzed)
- fn = symtab_alias_ultimate_target (symtab_get_node (function))->decl;
+ fn = symtab_node::get (function)->ultimate_alias_target ()->decl;
else
- fn = symtab_get_node (function)->alias_target;
+ fn = symtab_node::get (function)->alias_target;
}
resolve_unique_section (fn, 0, flag_function_sections);
/* Output the thunk into the same section as function. */
set_decl_section_name (thunk_fndecl, DECL_SECTION_NAME (fn));
- symtab_get_node (thunk_fndecl)->implicit_section
- = symtab_get_node (fn)->implicit_section;
+ symtab_node::get (thunk_fndecl)->implicit_section
+ = symtab_node::get (fn)->implicit_section;
}
}
a = nreverse (t);
DECL_ARGUMENTS (thunk_fndecl) = a;
TREE_ASM_WRITTEN (thunk_fndecl) = 1;
- funcn = cgraph_get_node (function);
+ funcn = cgraph_node::get (function);
gcc_checking_assert (funcn);
- thunk_node = cgraph_add_thunk (funcn, thunk_fndecl, function,
- this_adjusting, fixed_offset, virtual_value,
- virtual_offset, alias);
+ thunk_node = funcn->create_thunk (thunk_fndecl, function,
+ this_adjusting, fixed_offset, virtual_value,
+ virtual_offset, alias);
if (DECL_ONE_ONLY (function))
- symtab_add_to_same_comdat_group (thunk_node,
- funcn);
+ thunk_node->add_to_same_comdat_group (funcn);
if (!this_adjusting
|| !targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
diff_seen = true;
}
grp_name[idx] = '\0';
- gcc_assert (diff_seen || symtab_get_node (complete)->alias);
+ gcc_assert (diff_seen || symtab_node::get (complete)->alias);
return get_identifier (grp_name);
}
else if (HAVE_COMDAT_GROUP)
{
tree comdat_group = cdtor_comdat_group (fns[1], fns[0]);
- cgraph_get_create_node (fns[0])->set_comdat_group (comdat_group);
- symtab_add_to_same_comdat_group (cgraph_get_create_node (fns[1]),
- cgraph_get_create_node (fns[0]));
- symtab_add_to_same_comdat_group (symtab_get_node (fn),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (fns[0])->set_comdat_group (comdat_group);
+ cgraph_node::get_create (fns[1])->add_to_same_comdat_group
+ (cgraph_node::get_create (fns[0]));
+ symtab_node::get (fn)->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
if (fns[2])
/* If *[CD][12]* dtors go into the *[CD]5* comdat group and dtor is
virtual, it goes into the same comdat group as well. */
- symtab_add_to_same_comdat_group (cgraph_get_create_node (fns[2]),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (fns[2])->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
TREE_PUBLIC (fn) = false;
DECL_EXTERNAL (fn) = false;
DECL_INTERFACE_KNOWN (fn) = true;
name of fn was corrupted by write_mangled_name by adding *INTERNAL*
to it. By doing so, it also corrupted the comdat group. */
if (DECL_ONE_ONLY (fn))
- cgraph_get_create_node (clone)->set_comdat_group (cxx_comdat_group (clone));
+ cgraph_node::get_create (clone)->set_comdat_group (cxx_comdat_group (clone));
DECL_USE_TEMPLATE (clone) = DECL_USE_TEMPLATE (fn);
DECL_EXTERNAL (clone) = DECL_EXTERNAL (fn);
DECL_INTERFACE_KNOWN (clone) = DECL_INTERFACE_KNOWN (fn);
if (can_alias
&& fns[0]
&& idx == 1
- && cgraph_same_body_alias (cgraph_get_create_node (fns[0]),
- clone, fns[0]))
+ && cgraph_node::get_create (fns[0])->create_same_body_alias
+ (clone, fns[0]))
{
alias = true;
if (DECL_ONE_ONLY (fns[0]))
into the same, *[CD]5* comdat group instead of
*[CD][12]*. */
comdat_group = cdtor_comdat_group (fns[1], fns[0]);
- cgraph_get_create_node (fns[0])->set_comdat_group (comdat_group);
- if (symtab_get_node (clone)->same_comdat_group)
- symtab_remove_from_same_comdat_group (symtab_get_node (clone));
- symtab_add_to_same_comdat_group (symtab_get_node (clone),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (fns[0])->set_comdat_group (comdat_group);
+ if (symtab_node::get (clone)->same_comdat_group)
+ symtab_node::get (clone)->remove_from_same_comdat_group ();
+ symtab_node::get (clone)->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
}
}
/* If *[CD][12]* dtors go into the *[CD]5* comdat group and dtor is
virtual, it goes into the same comdat group as well. */
if (comdat_group)
- symtab_add_to_same_comdat_group
- (cgraph_get_create_node (clone),
- symtab_get_node (fns[0]));
+ cgraph_node::get_create (clone)->add_to_same_comdat_group
+ (symtab_node::get (fns[0]));
}
else if (alias)
/* No need to populate body. */ ;
&& !DECL_THUNK_P (decl)
&& !DECL_EXTERNAL (decl))
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
/* Don't fix same_body aliases. Although they don't have their own
CFG, they share it with what they alias to. */
DECL_STATIC_CONSTRUCTOR (vtv_fndecl) = 0;
gimplify_function_tree (vtv_fndecl);
- cgraph_add_new_function (vtv_fndecl, false);
+ cgraph_node::add_new_function (vtv_fndecl, false);
cgraph_process_new_functions ();
we can find and protect them. */
set_decl_section_name (var_decl, ".vtable_map_vars");
- symtab_get_node (var_decl)->implicit_section = true;
+ symtab_node::get (var_decl)->implicit_section = true;
DECL_INITIAL (var_decl) = initial_value;
comdat_linkage (var_decl);
optimizing and gimplifying the CU by now.
So if *TP has no call graph node associated
to it, it means *TP will not be emitted. */
- if (!cgraph_get_node (*tp))
+ if (!cgraph_node::get (*tp))
return *tp;
}
else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
}
if (crtl->nothrow
- && (cgraph_function_body_availability (cgraph_get_node
- (current_function_decl))
+ && (cgraph_node::get (current_function_decl)->get_availability ()
>= AVAIL_AVAILABLE))
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct cgraph_edge *e;
for (e = node->callers; e; e = e->next_caller)
e->can_throw_external = false;
- cgraph_set_nothrow_flag (node, true);
+ node->set_nothrow_flag (true);
if (dump_file)
fprintf (dump_file, "Marking function nothrow: %s\n\n",
{
struct symtab_node *symbol;
- symbol = symtab_get_node (base);
+ symbol = symtab_node::get (base);
if (symbol)
return symbol->nonzero_address ();
else
set_cfun (NULL);
if (decl_function_context (fndecl))
- (void) cgraph_create_node (fndecl);
+ (void) cgraph_node::create (fndecl);
else
cgraph_finalize_function (fndecl, true);
function has already called cgraph_create_node, which also created
the cgraph node for this function. */
if (!has_coarray_vars || gfc_option.coarray != GFC_FCOARRAY_LIB)
- (void) cgraph_create_node (fndecl);
+ (void) cgraph_node::create (fndecl);
}
else
cgraph_finalize_function (fndecl, true);
static objects are defined. */
if (cgraph_function_flags_ready)
return true;
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (!snode || !snode->definition)
return false;
node = dyn_cast <cgraph_node *> (snode);
if (DECL_VISIBILITY_SPECIFIED (decl)
&& DECL_EXTERNAL (decl)
&& DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
- && (!(snode = symtab_get_node (decl)) || !snode->in_other_partition))
+ && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
return false;
/* When function is public, we always can introduce new reference.
Exception are the COMDAT functions where introducing a direct
if (!cgraph_function_flags_ready)
return true;
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (!snode
|| ((!snode->definition || DECL_EXTERNAL (decl))
&& (!snode->in_other_partition
/* Make sure we create a cgraph node for functions we'll reference.
They can be non-existent if the reference comes from an entry
of an external vtable for example. */
- cgraph_get_create_node (base);
+ cgraph_node::get_create (base);
}
/* Fixup types in global initializers. */
if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
{
if (dump_file && virtual_method_call_p (callee)
&& !possible_polymorphic_call_target_p
- (callee, cgraph_get_node (gimple_call_addr_fndecl
- (OBJ_TYPE_REF_EXPR (callee)))))
+ (callee, cgraph_node::get (gimple_call_addr_fndecl
+ (OBJ_TYPE_REF_EXPR (callee)))))
{
fprintf (dump_file,
"Type inheritance inconsistent devirtualization of ");
/* Make sure we create a cgraph node for functions we'll reference.
They can be non-existent if the reference comes from an entry
of an external vtable for example. */
- cgraph_get_create_node (fn);
+ cgraph_node::get_create (fn);
return fn;
}
to avoid calling them if we never see any calls. */
if (cfun_node == NULL)
{
- cfun_node = cgraph_get_node (current_function_decl);
+ cfun_node = cgraph_node::get (current_function_decl);
bb_freq = (compute_call_stmt_bb_frequency
(current_function_decl, bb));
}
- e = cgraph_edge (cfun_node, n);
+ e = cfun_node->get_edge (n);
if (e != NULL)
e->frequency = bb_freq;
}
static void
unshare_body (tree fndecl)
{
- struct cgraph_node *cgn = cgraph_get_node (fndecl);
+ struct cgraph_node *cgn = cgraph_node::get (fndecl);
/* If the language requires deep unsharing, we need a pointer set to make
sure we don't repeatedly unshare subtrees of unshareable nodes. */
struct pointer_set_t *visited
static void
unvisit_body (tree fndecl)
{
- struct cgraph_node *cgn = cgraph_get_node (fndecl);
+ struct cgraph_node *cgn = cgraph_node::get (fndecl);
unmark_visited (&DECL_SAVED_TREE (fndecl));
unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
unshare_body (fndecl);
unvisit_body (fndecl);
- cgn = cgraph_get_node (fndecl);
+ cgn = cgraph_node::get (fndecl);
if (cgn && cgn->origin)
nonlocal_vlas = pointer_set_create ();
for (i = 0; symbol->iterate_reference (i, ref); i++)
{
- symtab_node *node = symtab_alias_ultimate_target (ref->referred, NULL);
+ symtab_node *node = ref->referred->ultimate_alias_target ();
if (!node->aux && node->definition)
{
node->aux = *first;
enqueue_references (first, edge->callee);
else
{
- symtab_node *node = symtab_alias_ultimate_target (edge->callee,
- NULL);
+ symtab_node *node = edge->callee->ultimate_alias_target ();
if (!node->aux && node->definition)
{
node->aux = *first;
gcc_assert (!symbol->get_comdat_group ());
symbol->set_comdat_group (head->get_comdat_group ());
- symtab_add_to_same_comdat_group (symbol, head);
+ symbol->add_to_same_comdat_group (head);
return false;
}
ERROR_MARK_NODE as bottom for the propagation. */
FOR_EACH_DEFINED_SYMBOL (symbol)
- if (!symtab_real_symbol_p (symbol))
+ if (!symbol->real_symbol_p ())
;
else if ((group = symbol->get_comdat_group ()) != NULL)
{
&& (DECL_STATIC_CONSTRUCTOR (symbol->decl)
|| DECL_STATIC_DESTRUCTOR (symbol->decl))))
{
- map.put (symtab_alias_ultimate_target (symbol, NULL), error_mark_node);
+ map.put (symbol->ultimate_alias_target (), error_mark_node);
/* Mark the symbol so we won't waste time visiting it for dataflow. */
symbol->aux = (symtab_node *) (void *) 1;
symbol->aux = NULL;
if (!symbol->get_comdat_group ()
&& !symbol->alias
- && symtab_real_symbol_p (symbol))
+ && symbol->real_symbol_p ())
{
tree group = *map.get (symbol);
if (dump_file)
{
fprintf (dump_file, "Localizing symbol\n");
- dump_symtab_node (dump_file, symbol);
+ symbol->dump (dump_file);
fprintf (dump_file, "To group: %s\n", IDENTIFIER_POINTER (group));
}
- symtab_for_node_and_aliases (symbol, set_comdat_group,
- *comdat_head_map.get (group), true);
+ symbol->call_for_symbol_and_aliases (set_comdat_group,
+ *comdat_head_map.get (group),
+ true);
}
}
return 0;
reason = "alias or thunk";
else if (!node->local.versionable)
reason = "not a tree_versionable_function";
- else if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ else if (node->get_availability () <= AVAIL_INTERPOSABLE)
reason = "insufficient body availability";
else if (!opt_for_fn (node->decl, optimize)
|| !opt_for_fn (node->decl, flag_ipa_cp))
}
/* Don't clone decls local to a comdat group; it breaks and for C++
decloned constructors, inlining is always better anyway. */
- else if (symtab_comdat_local_p (node))
+ else if (node->comdat_local_p ())
reason = "comdat-local function";
if (reason && dump_file && !node->alias && !node->thunk.thunk_p)
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p)
- cgraph_for_node_and_aliases (cs->caller, gather_caller_stats,
- stats, false);
+ cs->caller->call_for_symbol_thunks_and_aliases (gather_caller_stats,
+ stats, false);
else
{
stats->count_sum += cs->count;
{
struct caller_statistics stats;
- gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
+ gcc_checking_assert (node->has_gimple_body_p ());
if (!flag_ipa_cp_clone)
{
}
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
+ node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats, false);
if (inline_summary (node)->self_size < stats.n_calls)
{
bool disable = false, variable = false;
int i;
- gcc_checking_assert (cgraph_function_with_gimple_body_p (node));
+ gcc_checking_assert (node->has_gimple_body_p ());
if (!node->local.local)
{
/* When cloning is allowed, we can assume that externally visible
{
if (dump_file)
{
- dump_symtab (dump_file);
+ symtab_node::dump_table (dump_file);
fprintf (dump_file, "\nIPA lattices after constant "
"propagation, before gcc_unreachable:\n");
print_all_lattices (dump_file, true, false);
bool ret = false;
int i, args_count, parms_count;
- callee = cgraph_function_node (cs->callee, &availability);
+ callee = cs->callee->function_symbol (&availability);
if (!callee->definition)
return false;
- gcc_checking_assert (cgraph_function_with_gimple_body_p (callee));
+ gcc_checking_assert (callee->has_gimple_body_p ());
callee_info = IPA_NODE_REF (callee);
args = IPA_EDGE_REF (cs);
of aliases first. */
alias_or_thunk = cs->callee;
while (alias_or_thunk->alias)
- alias_or_thunk = cgraph_alias_target (alias_or_thunk);
+ alias_or_thunk = alias_or_thunk->get_alias_target ();
if (alias_or_thunk->thunk.thunk_p)
{
ret |= set_all_contains_variable (ipa_get_parm_lattices (callee_info,
struct ipcp_param_lattices *dest_plats;
dest_plats = ipa_get_parm_lattices (callee_info, i);
- if (availability == AVAIL_OVERWRITABLE)
+ if (availability == AVAIL_INTERPOSABLE)
ret |= set_all_contains_variable (dest_plats);
else
{
if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
&& DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
|| !possible_polymorphic_call_target_p
- (ie, cgraph_get_node (target)))
+ (ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return target;
}
}
if (target && !possible_polymorphic_call_target_p (ie,
- cgraph_get_node (target)))
+ cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return target;
/* Only bare minimum benefit for clearly un-inlineable targets. */
res += 1;
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
if (!callee || !callee->definition)
continue;
- callee = cgraph_function_node (callee, &avail);
+ callee = callee->function_symbol (&avail);
if (avail < AVAIL_AVAILABLE)
continue;
isummary = inline_summary (callee);
int time, size;
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (node, gather_caller_stats, &stats, false);
+ node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
+ false);
estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
known_aggs_ptrs, &size, &time, &hints);
time -= devirtualization_time_bonus (node, known_csts, known_binfos,
"time_benefit: %i\n", size, base_time - time);
if (size <= 0
- || cgraph_will_be_removed_from_program_if_no_direct_calls (node))
+ || node->will_be_removed_from_program_if_no_direct_calls_p ())
{
info->do_clone_for_all_contexts = true;
base_time = time;
{
unsigned j;
struct cgraph_node *v, *node = topo->order[i];
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* First, iteratively propagate within the strongly connected component
until all lattices stabilize. */
FOR_EACH_VEC_ELT (cycle_nodes, j, v)
- if (cgraph_function_with_gimple_body_p (v))
+ if (v->has_gimple_body_p ())
push_node_to_stack (topo, v);
v = pop_node_from_stack (topo);
the local effects of the discovered constants and all valid values to
their topological sort. */
FOR_EACH_VEC_ELT (cycle_nodes, j, v)
- if (cgraph_function_with_gimple_body_p (v))
+ if (v->has_gimple_body_p ())
{
struct cgraph_edge *cs;
struct ipa_node_params *info = IPA_NODE_REF (node);
determine_versionability (node);
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
{
info->lattices = XCNEWVEC (struct ipcp_param_lattices,
ipa_get_param_count (info));
/* Vector of pointers which form linked lists of clones of an original cgraph
 edge. */
-static vec<cgraph_edge_p> next_edge_clone;
-static vec<cgraph_edge_p> prev_edge_clone;
+static vec<cgraph_edge *> next_edge_clone;
+static vec<cgraph_edge *> prev_edge_clone;
static inline void
grow_edge_clone_vectors (void)
struct ipcp_value_source *src)
{
struct ipa_node_params *caller_info = IPA_NODE_REF (cs->caller);
- cgraph_node *real_dest = cgraph_function_node (cs->callee);
+ cgraph_node *real_dest = cs->callee->function_symbol ();
struct ipa_node_params *dst_info = IPA_NODE_REF (real_dest);
if ((dst_info->ipcp_orig_node && !dst_info->is_all_contexts_clone)
/* Return a vector of incoming edges that do bring value VAL. It is assumed
their number is known and equal to CALLER_COUNT. */
-static vec<cgraph_edge_p>
+static vec<cgraph_edge *>
gather_edges_for_value (struct ipcp_value *val, int caller_count)
{
struct ipcp_value_source *src;
- vec<cgraph_edge_p> ret;
+ vec<cgraph_edge *> ret;
ret.create (caller_count);
for (src = val->sources; src; src = src->next)
return;
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (orig_node, gather_caller_stats, &stats, false);
+ orig_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
+ false);
orig_sum = stats.count_sum;
init_caller_stats (&stats);
- cgraph_for_node_and_aliases (new_node, gather_caller_stats, &stats, false);
+ new_node->call_for_symbol_thunks_and_aliases (gather_caller_stats, &stats,
+ false);
new_sum = stats.count_sum;
if (orig_node_count < orig_sum + new_sum)
create_specialized_node (struct cgraph_node *node,
vec<tree> known_vals,
struct ipa_agg_replacement_value *aggvals,
- vec<cgraph_edge_p> callers)
+ vec<cgraph_edge *> callers)
{
struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
- vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
+ vec<ipa_replace_map *, va_gc> *replace_trees = NULL;
struct ipa_agg_replacement_value *av;
struct cgraph_node *new_node;
int i, count = ipa_get_param_count (info);
}
}
- new_node = cgraph_create_virtual_clone (node, callers, replace_trees,
- args_to_skip, "constprop");
+ new_node = node->create_virtual_clone (callers, replace_trees,
+ args_to_skip, "constprop");
ipa_set_node_agg_value_chain (new_node, aggvals);
for (av = aggvals; av; av = av->next)
new_node->maybe_add_reference (av->value, IPA_REF_ADDR, NULL);
static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
vec<tree> known_vals,
- vec<cgraph_edge_p> callers)
+ vec<cgraph_edge *> callers)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
- vec<cgraph_edge_p> callers)
+ vec<cgraph_edge *> callers)
{
struct ipa_node_params *dest_info = IPA_NODE_REF (node);
struct ipa_agg_replacement_value *res = NULL;
while (cs)
{
enum availability availability;
- struct cgraph_node *dst = cgraph_function_node (cs->callee,
- &availability);
+ struct cgraph_node *dst = cs->callee->function_symbol (&availability);
if ((dst == node || IPA_NODE_REF (dst)->is_all_contexts_clone)
- && availability > AVAIL_OVERWRITABLE
+ && availability > AVAIL_INTERPOSABLE
&& cgraph_edge_brings_value_p (cs, src))
{
if (cgraph_edge_brings_all_scalars_for_node (cs, val->spec_node)
struct ipa_agg_replacement_value *aggvals;
int freq_sum, caller_count;
gcov_type count_sum;
- vec<cgraph_edge_p> callers;
+ vec<cgraph_edge *> callers;
vec<tree> kv;
if (val->spec_node)
if (info->do_clone_for_all_contexts)
{
struct cgraph_node *clone;
- vec<cgraph_edge_p> callers;
+ vec<cgraph_edge *> callers;
if (dump_file)
fprintf (dump_file, " - Creating a specialized node of %s/%i "
"for all known contexts.\n", node->name (),
node->order);
- callers = collect_callers_of_node (node);
+ callers = node->collect_callers ();
move_binfos_to_values (known_csts, known_binfos);
clone = create_specialized_node (node, known_csts,
known_aggs_to_agg_replacement_list (known_aggs),
struct cgraph_node *callee;
struct ipa_node_params *info;
- callee = cgraph_function_node (cs->callee, NULL);
+ callee = cs->callee->function_symbol (NULL);
info = IPA_NODE_REF (callee);
if (info->node_dead)
for (cs = node->callers; cs; cs = cs->next_caller)
if (cs->caller->thunk.thunk_p
- && cgraph_for_node_and_aliases (cs->caller,
- has_undead_caller_from_outside_scc_p,
- NULL, true))
+ && cs->caller->call_for_symbol_thunks_and_aliases
+ (has_undead_caller_from_outside_scc_p, NULL, true))
return true;
else if (!ipa_edge_within_scc (cs)
&& !IPA_NODE_REF (cs->caller)->node_dead)
{
struct cgraph_node *v;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
- if (cgraph_will_be_removed_from_program_if_no_direct_calls (v)
- && !cgraph_for_node_and_aliases (v,
- has_undead_caller_from_outside_scc_p,
- NULL, true))
+ if (v->will_be_removed_from_program_if_no_direct_calls_p ()
+ && !v->call_for_symbol_thunks_and_aliases
+ (has_undead_caller_from_outside_scc_p, NULL, true))
IPA_NODE_REF (v)->node_dead = 1;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
struct cgraph_node *v;
iterate = false;
for (v = node; v ; v = ((struct ipa_dfs_info *) v->aux)->next_cycle)
- if (cgraph_function_with_gimple_body_p (v)
+ if (v->has_gimple_body_p ()
&& ipcp_versionable_function_p (v))
iterate |= decide_whether_version_node (v);
FOR_EACH_SYMBOL (n)
if (is_a <cgraph_node *> (n)
&& DECL_VIRTUAL_P (n->decl)
- && symtab_real_symbol_p (n))
+ && n->real_symbol_p ())
get_odr_type (TYPE_MAIN_VARIANT (method_class_type (TREE_TYPE (n->decl))),
true);
for (i = 0; node->iterate_referring (i, ref); i++)
if ((ref->use == IPA_REF_ALIAS
- && referenced_from_vtable_p (cgraph (ref->referring)))
+ && referenced_from_vtable_p (dyn_cast<cgraph_node *> (ref->referring)))
|| (ref->use == IPA_REF_ADDR
&& TREE_CODE (ref->referring->decl) == VAR_DECL
&& DECL_VIRTUAL_P (ref->referring->decl)))
if (!target)
return;
- target_node = cgraph_get_node (target);
+ target_node = cgraph_node::get (target);
/* Prefer alias target over aliases, so we do not get confused by
fake duplicates. */
if (target_node)
{
- alias_target = cgraph_function_or_thunk_node (target_node, &avail);
+ alias_target = target_node->ultimate_alias_target (&avail);
if (target_node != alias_target
&& avail >= AVAIL_AVAILABLE
- && cgraph_function_body_availability (target_node))
+ && target_node->get_availability ())
target_node = alias_target;
}
&& (TREE_PUBLIC (target)
|| DECL_EXTERNAL (target)
|| target_node->definition)
- && symtab_real_symbol_p (target_node))
+ && target_node->real_symbol_p ())
{
gcc_assert (!target_node->global.inlined_to);
- gcc_assert (symtab_real_symbol_p (target_node));
+ gcc_assert (target_node->real_symbol_p ());
if (!pointer_set_insert (inserted, target_node->decl))
{
pointer_set_insert (cached_polymorphic_call_targets,
return true;
targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
for (i = 0; i < targets.length (); i++)
- if (symtab_semantically_equivalent_p (n, targets[i]))
+ if (n->semantically_equivalent_p (targets[i]))
return true;
/* At a moment we allow middle end to dig out new external declarations
FOR_EACH_FUNCTION (n)
if (DECL_VIRTUAL_P (n->decl)
&& !n->definition
- && symtab_real_symbol_p (n))
+ && n->real_symbol_p ())
get_odr_type (method_class_type (TYPE_MAIN_VARIANT (TREE_TYPE (n->decl))),
true);
timevar_pop (TV_IPA_INHERITANCE);
struct cgraph_edge *e2;
struct ipa_ref *ref;
cgraph_speculative_call_info (e, e2, e, ref);
- if (cgraph_function_or_thunk_node (e2->callee, NULL)
- == cgraph_function_or_thunk_node (likely_target, NULL))
+ if (e2->callee->ultimate_alias_target ()
+ == likely_target->ultimate_alias_target ())
{
fprintf (dump_file, "We agree with speculation\n\n");
nok++;
}
/* Don't use an implicitly-declared destructor (c++/58678). */
struct cgraph_node *non_thunk_target
- = cgraph_function_node (likely_target);
+ = likely_target->function_symbol ();
if (DECL_ARTIFICIAL (non_thunk_target->decl)
&& DECL_COMDAT (non_thunk_target->decl))
{
nartificial++;
continue;
}
- if (cgraph_function_body_availability (likely_target)
- <= AVAIL_OVERWRITABLE
- && symtab_can_be_discarded (likely_target))
+ if (likely_target->get_availability () <= AVAIL_INTERPOSABLE
+ && likely_target->can_be_discarded_p ())
{
if (dump_file)
fprintf (dump_file, "Target is overwritable\n\n");
likely_target->name (),
likely_target->order);
}
- if (!symtab_can_be_discarded (likely_target))
+ if (!likely_target->can_be_discarded_p ())
{
cgraph_node *alias;
- alias = cgraph (symtab_nonoverwritable_alias
- (likely_target));
+ alias = dyn_cast<cgraph_node *> (likely_target->noninterposable_alias ());
if (alias)
likely_target = alias;
}
struct cgraph_node *callee = !e->inline_failed ? e->callee : NULL;
cgraph_redirect_edge_callee (e,
- cgraph_get_create_node
+ cgraph_node::get_create
(builtin_decl_implicit (BUILT_IN_UNREACHABLE)));
e->inline_failed = CIF_UNREACHABLE;
if (callee)
- cgraph_remove_node_and_inline_clones (callee, NULL);
+ callee->remove_symbol_and_inline_clones ();
}
if (predicate && !true_predicate_p (predicate))
{
vec<tree> *known_binfos_ptr,
vec<ipa_agg_jump_function_p> *known_aggs_ptr)
{
- struct cgraph_node *callee =
- cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
struct inline_summary *info = inline_summary (callee);
vec<tree> known_vals = vNULL;
vec<ipa_agg_jump_function_p> known_aggs = vNULL;
for (edge = node->callees; edge; edge = edge->next_callee)
{
struct inline_edge_summary *es = inline_edge_summary (edge);
- struct cgraph_node *callee =
- cgraph_function_or_thunk_node (edge->callee, NULL);
+ struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
int i;
fprintf (f,
if (is_gimple_call (stmt)
&& !gimple_call_internal_p (stmt))
{
- struct cgraph_edge *edge = cgraph_edge (node, stmt);
+ struct cgraph_edge *edge = node->get_edge (stmt);
struct inline_edge_summary *es = inline_edge_summary (edge);
/* Special case: results of BUILT_IN_CONSTANT_P will be always
estimate_function_body_sizes (node, early);
for (e = node->callees; e; e = e->next_callee)
- if (symtab_comdat_local_p (e->callee))
+ if (e->callee->comdat_local_p ())
break;
node->calls_comdat_local = (e != NULL);
static unsigned int
compute_inline_parameters_for_current (void)
{
- compute_inline_parameters (cgraph_get_node (current_function_decl), true);
+ compute_inline_parameters (cgraph_node::get (current_function_decl), true);
return 0;
}
gcc_checking_assert (*time >= 0);
gcc_checking_assert (*size >= 0);
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
if (!callee || !callee->definition)
return false;
- callee = cgraph_function_node (callee, &avail);
+ callee = callee->function_symbol (&avail);
if (avail < AVAIL_AVAILABLE)
return false;
isummary = inline_summary (callee);
struct inline_edge_summary *es = inline_edge_summary (edge);
int min_size;
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
gcc_checking_assert (edge->inline_failed);
evaluate_properties_for_edge (edge, true,
return size - (size > 0);
}
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
/* Early inliner runs without caching, go ahead and do the dirty work. */
gcc_checking_assert (edge->inline_failed);
return hints - 1;
}
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
/* Early inliner runs without caching, go ahead and do the dirty work. */
gcc_checking_assert (edge->inline_failed);
struct growth_data d = { node, 0, false };
struct inline_summary *info = inline_summary (node);
- cgraph_for_node_and_aliases (node, do_estimate_growth_1, &d, true);
+ node->call_for_symbol_thunks_and_aliases (do_estimate_growth_1, &d, true);
/* For self recursive functions the growth estimation really should be
infinity. We don't want to return very large values because the growth
;
else
{
- if (cgraph_will_be_removed_from_program_if_no_direct_calls (node))
+ if (node->will_be_removed_from_program_if_no_direct_calls_p ())
d.growth -= info->size;
/* COMDAT functions are very often not shared across multiple units
since they come from various template instantiations.
Take this into account. */
else if (DECL_COMDAT (node->decl)
- && cgraph_can_remove_if_no_direct_calls_p (node))
+ && node->can_remove_if_no_direct_calls_p ())
d.growth -= (info->size
* (100 - PARAM_VALUE (PARAM_COMDAT_SHARING_PROBABILITY))
+ 50) / 100;
instead of
cgraph_will_be_removed_from_program_if_no_direct_calls */
if (DECL_EXTERNAL (node->decl)
- || !cgraph_can_remove_if_no_direct_calls_p (node))
+ || !node->can_remove_if_no_direct_calls_p ())
return true;
/* If there is cached value, just go ahead. */
if ((int)node_growth_cache.length () > node->uid
&& (ret = node_growth_cache[node->uid]))
return ret > 0;
- if (!cgraph_will_be_removed_from_program_if_no_direct_calls (node)
+ if (!node->will_be_removed_from_program_if_no_direct_calls_p ()
&& (!DECL_COMDAT (node->decl)
- || !cgraph_can_remove_if_no_direct_calls_p (node)))
+ || !node->can_remove_if_no_direct_calls_p ()))
return true;
max_callers = inline_summary (node)->size * 4 / edge_growth + 2;
index = streamer_read_uhwi (&ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
info = inline_summary (node);
info->estimated_stack_size
return (!node->address_taken
&& !node->has_aliases_p ()
&& !node->used_as_abstract_origin
- && cgraph_can_remove_if_no_direct_calls_p (node)
+ && node->can_remove_if_no_direct_calls_p ()
/* Inlining might enable more devirtualizing, so we want to remove
those only after all devirtualizable virtual calls are processed.
Lacking may edges in callgraph we just preserve them post
items can be removed. */
if (!node->same_comdat_group)
return true;
- for (next = cgraph (node->same_comdat_group);
- next != node; next = cgraph (next->same_comdat_group))
+ for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
+ next != node; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
if ((next->callers && next->callers != e)
|| !can_remove_node_now_p_1 (next))
return false;
For now we keep the other functions in the group in program until
cgraph_remove_unreachable_functions gets rid of them. */
gcc_assert (!e->callee->global.inlined_to);
- symtab_dissolve_same_comdat_group_list (e->callee);
+ e->callee->dissolve_same_comdat_group_list ();
if (e->callee->definition && !DECL_EXTERNAL (e->callee->decl))
{
if (overall_size)
if (freq_scale == -1)
freq_scale = e->frequency;
- n = cgraph_clone_node (e->callee, e->callee->decl,
- MIN (e->count, e->callee->count), freq_scale,
- update_original, vNULL, true, inlining_into,
- NULL);
+ n = e->callee->create_clone (e->callee->decl,
+ MIN (e->count, e->callee->count),
+ freq_scale,
+ update_original, vNULL, true,
+ inlining_into,
+ NULL);
cgraph_redirect_edge_callee (e, n);
}
}
else
- symtab_dissolve_same_comdat_group_list (e->callee);
+ e->callee->dissolve_same_comdat_group_list ();
e->callee->global.inlined_to = inlining_into;
bool
inline_call (struct cgraph_edge *e, bool update_original,
- vec<cgraph_edge_p> *new_edges,
+ vec<cgraph_edge *> *new_edges,
int *overall_size, bool update_overall_summary,
bool *callee_removed)
{
int old_size = 0, new_size = 0;
struct cgraph_node *to = NULL;
struct cgraph_edge *curr = e;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
bool new_edges_found = false;
#ifdef ENABLE_CHECKING
if (!alias->callers
&& can_remove_node_now_p (alias, e))
{
- next_alias = cgraph_alias_target (alias);
- cgraph_remove_node (alias);
+ next_alias = alias->get_alias_target ();
+ alias->remove ();
if (callee_removed)
*callee_removed = true;
alias = next_alias;
if (callee->calls_comdat_local)
to->calls_comdat_local = true;
- else if (to->calls_comdat_local && symtab_comdat_local_p (callee))
+ else if (to->calls_comdat_local && callee->comdat_local_p ())
{
struct cgraph_edge *se = to->callees;
for (; se; se = se->next_callee)
- if (se->inline_failed && symtab_comdat_local_p (se->callee))
+ if (se->inline_failed && se->callee->comdat_local_p ())
break;
if (se == NULL)
to->calls_comdat_local = false;
fprintf (dump_file, "\nSaving body of %s for later reuse\n",
node->name ());
- gcc_assert (node == cgraph_get_node (node->decl));
+ gcc_assert (node == cgraph_node::get (node->decl));
/* first_clone will be turned into real function. */
first_clone = node->clones;
first_clone->decl = copy_node (node->decl);
first_clone->decl->decl_with_vis.symtab_node = first_clone;
- gcc_assert (first_clone == cgraph_get_node (first_clone->decl));
+ gcc_assert (first_clone == cgraph_node::get (first_clone->decl));
/* Now reshape the clone tree, so all other clones descends from
first_clone. */
Remove it now. */
if (!first_clone->callers)
{
- cgraph_remove_node_and_inline_clones (first_clone, NULL);
+ first_clone->remove_symbol_and_inline_clones ();
first_clone = NULL;
}
#ifdef ENABLE_CHECKING
else
- verify_cgraph_node (first_clone);
+ first_clone->verify ();
#endif
return first_clone;
}
caller_growth_limits (struct cgraph_edge *e)
{
struct cgraph_node *to = e->caller;
- struct cgraph_node *what = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *what = e->callee->ultimate_alias_target ();
int newsize;
int limit = 0;
HOST_WIDE_INT stack_size_limit = 0, inlined_stack;
{
bool inlinable = true;
enum availability avail;
- struct cgraph_node *callee
- = cgraph_function_or_thunk_node (e->callee, &avail);
+ cgraph_node *callee = e->callee->ultimate_alias_target (&avail);
tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (e->caller->decl);
tree callee_tree
= callee ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (callee->decl) : NULL;
e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
inlinable = false;
}
- else if (avail <= AVAIL_OVERWRITABLE)
+ else if (avail <= AVAIL_INTERPOSABLE)
{
e->inline_failed = CIF_OVERWRITABLE;
inlinable = false;
static bool
can_early_inline_edge_p (struct cgraph_edge *e)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee,
- NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
/* Early inliner might get called at WPA stage when IPA pass adds new
function. In this case we can not really do any of early inlining
because function bodies are missing. */
want_early_inline_function_p (struct cgraph_edge *e)
{
bool want_inline = true;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
;
want_inline_small_function_p (struct cgraph_edge *e, bool report)
{
bool want_inline = true;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
;
static bool
want_inline_function_to_all_callers_p (struct cgraph_node *node, bool cold)
{
- struct cgraph_node *function = cgraph_function_or_thunk_node (node, NULL);
+ struct cgraph_node *function = node->ultimate_alias_target ();
bool has_hot_call = false;
/* Does it have callers? */
- if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
+ if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
return false;
/* Already inlined? */
if (function->global.inlined_to)
return false;
- if (cgraph_function_or_thunk_node (node, NULL) != node)
+ if (node->ultimate_alias_target () != node)
return false;
/* Inlining into all callers would increase size? */
if (estimate_growth (node) > 0)
return false;
/* All inlines must be possible. */
- if (cgraph_for_node_and_aliases (node, check_callers, &has_hot_call, true))
+ if (node->call_for_symbol_thunks_and_aliases
+ (check_callers, &has_hot_call, true))
return false;
if (!cold && !has_hot_call)
return false;
{
gcov_type badness;
int growth, edge_time;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (edge->callee,
- NULL);
+ struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
struct inline_summary *callee_info = inline_summary (callee);
inline_hints hints;
growth could have just increased and consequently the badness metric
doesn't need updating. */
if (e->inline_failed
- && (callee = cgraph_function_or_thunk_node (e->callee, &avail))
+ && (callee = e->callee->ultimate_alias_target (&avail))
&& inline_summary (callee)->inlinable
&& avail >= AVAIL_AVAILABLE
&& !bitmap_bit_p (updated_nodes, callee->uid))
for (e = where->callees; e; e = e->next_callee)
if (e->callee == node
- || (cgraph_function_or_thunk_node (e->callee, &avail) == node
- && avail > AVAIL_OVERWRITABLE))
+ || (e->callee->ultimate_alias_target (&avail) == node
+ && avail > AVAIL_INTERPOSABLE))
{
/* When profile feedback is available, prioritize by expected number
of calls. */
static bool
recursive_inlining (struct cgraph_edge *edge,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
int limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE_AUTO);
fibheap_t heap;
for (cnode = curr->caller;
cnode->global.inlined_to; cnode = cnode->callers->caller)
if (node->decl
- == cgraph_function_or_thunk_node (curr->callee, NULL)->decl)
+ == curr->callee->ultimate_alias_target ()->decl)
depth++;
if (!want_inline_self_recursive_call_p (curr, node, false, depth))
if (!master_clone)
{
/* We need original clone to copy around. */
- master_clone = cgraph_clone_node (node, node->decl,
- node->count, CGRAPH_FREQ_BASE,
- false, vNULL, true, NULL, NULL);
+ master_clone = node->create_clone (node->decl, node->count,
+ CGRAPH_FREQ_BASE, false, vNULL,
+ true, NULL, NULL);
for (e = master_clone->callees; e; e = e->next_callee)
if (!e->inline_failed)
clone_inlined_nodes (e, true, false, NULL, CGRAPH_FREQ_BASE);
{
next = cgraph_next_function (node);
if (node->global.inlined_to == master_clone)
- cgraph_remove_node (node);
+ node->remove ();
}
- cgraph_remove_node (master_clone);
+ master_clone->remove ();
return true;
}
/* Compute badness of all edges in NEW_EDGES and add them to the HEAP. */
static void
-add_new_edges_to_heap (fibheap_t heap, vec<cgraph_edge_p> new_edges)
+add_new_edges_to_heap (fibheap_t heap, vec<cgraph_edge *> new_edges)
{
while (new_edges.length () > 0)
{
speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining)
{
enum availability avail;
- struct cgraph_node *target = cgraph_function_or_thunk_node (e->callee, &avail);
+ struct cgraph_node *target = e->callee->ultimate_alias_target (&avail);
struct cgraph_edge *direct, *indirect;
struct ipa_ref *ref;
fibheap_t edge_heap = fibheap_new ();
bitmap updated_nodes = BITMAP_ALLOC (NULL);
int min_size, max_size;
- auto_vec<cgraph_edge_p> new_indirect_edges;
+ auto_vec<cgraph_edge *> new_indirect_edges;
int initial_size = 0;
struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
struct cgraph_edge_hook_list *edge_removal_hook_holder;
FOR_EACH_DEFINED_FUNCTION (node)
if (!node->global.inlined_to)
{
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
|| node->thunk.thunk_p)
{
struct inline_summary *info = inline_summary (node);
continue;
}
- callee = cgraph_function_or_thunk_node (edge->callee, NULL);
+ callee = edge->callee->ultimate_alias_target ();
growth = estimate_edge_growth (edge);
if (dump_file)
{
for (e = node->callees; e; e = e->next_callee)
{
struct cgraph_node *orig_callee;
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
/* We've hit cycle? It is time to give up. */
if (callee->aux)
&& want_inline_function_to_all_callers_p (node, cold))
{
int num_calls = 0;
- cgraph_for_node_and_aliases (node, sum_callers,
- &num_calls, true);
- while (cgraph_for_node_and_aliases (node, inline_to_all_callers,
- &num_calls, true))
+ node->call_for_symbol_thunks_and_aliases (sum_callers, &num_calls,
+ true);
+ while (node->call_for_symbol_thunks_and_aliases (inline_to_all_callers,
+ &num_calls, true))
;
remove_functions = true;
}
for (e = node->callees; e; e = e->next_callee)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (!DECL_DISREGARD_INLINE_LIMITS (callee->decl))
continue;
for (e = node->callees; e; e = e->next_callee)
{
- struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
+ struct cgraph_node *callee = e->callee->ultimate_alias_target ();
if (!inline_summary (callee)->inlinable
|| !e->inline_failed)
continue;
unsigned int
pass_early_inline::execute (function *fun)
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
struct cgraph_edge *edge;
unsigned int todo = 0;
int iterations = 0;
return 0;
#ifdef ENABLE_CHECKING
- verify_cgraph_node (node);
+ node->verify ();
#endif
node->remove_all_references ();
bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
/* In ipa-inline-transform.c */
-bool inline_call (struct cgraph_edge *, bool, vec<cgraph_edge_p> *, int *, bool,
+bool inline_call (struct cgraph_edge *, bool, vec<cgraph_edge *> *, int *, bool,
bool *callee_removed = NULL);
unsigned int inline_transform (struct cgraph_node *);
void clone_inlined_nodes (struct cgraph_edge *e, bool, bool, int *,
counter 2 is total number of executions. */
if (h->hvalue.counters[2])
{
- struct cgraph_edge * e = cgraph_edge (node, stmt);
+ struct cgraph_edge * e = node->get_edge (stmt);
if (e && !e->indirect_unknown_callee)
continue;
e->indirect_info->common_target_id
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Processing frequency %s\n", node->name ());
- cgraph_for_node_and_aliases (node, ipa_propagate_frequency_1, &d, true);
+ node->call_for_symbol_thunks_and_aliases (ipa_propagate_frequency_1, &d,
+ true);
if ((d.only_called_at_startup && !d.only_called_at_exit)
&& !node->only_called_at_startup)
fprintf (dump_file,
"Not speculating: call is cold.\n");
}
- else if (cgraph_function_body_availability (n2)
- <= AVAIL_OVERWRITABLE
- && symtab_can_be_discarded (n2))
+ else if (n2->get_availability () <= AVAIL_INTERPOSABLE
+ && n2->can_be_discarded_p ())
{
nuseless++;
if (dump_file)
control flow goes to this particular implementation
of N2. Speculate on the local alias to allow inlining.
*/
- if (!symtab_can_be_discarded (n2))
+ if (!n2->can_be_discarded_p ())
{
cgraph_node *alias;
- alias = cgraph (symtab_nonoverwritable_alias
- (n2));
+ alias = dyn_cast<cgraph_node *> (n2->noninterposable_alias ());
if (alias)
n2 = alias;
}
struct ipa_bb_info
{
/* Call graph edges going out of this BB. */
- vec<cgraph_edge_p> cg_edges;
+ vec<cgraph_edge *> cg_edges;
/* Alias analysis statuses of each formal parameter at this bb. */
vec<param_aa_status> param_aa_statuses;
};
if (callee)
{
- cgraph_function_or_thunk_node (callee, NULL);
+ callee->ultimate_alias_target ();
/* We do not need to bother analyzing calls to unknown functions
unless they may become known during lto/whopr. */
if (!callee->definition && !flag_lto)
{
struct cgraph_edge *cs;
- cs = cgraph_edge (node, stmt);
+ cs = node->get_edge (stmt);
cs->indirect_info->param_index = param_index;
cs->indirect_info->agg_contents = 0;
cs->indirect_info->member_ptr = 0;
/* If we previously turned the call into a direct call, there is
no need to analyze. */
- struct cgraph_edge *cs = cgraph_edge (fbi->node, call);
+ struct cgraph_edge *cs = fbi->node->get_edge (call);
if (cs && !cs->indirect_unknown_callee)
return;
if (TREE_CODE (target) == SSA_NAME)
#ifdef ENABLE_CHECKING
if (fndecl)
gcc_assert (possible_polymorphic_call_target_p
- (otr, cgraph_get_node (fndecl)));
+ (otr, cgraph_node::get (fndecl)));
#endif
return fndecl;
}
}
target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
- callee = cgraph_get_create_node (target);
+ callee = cgraph_node::get_create (target);
unreachable = true;
}
else
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
}
else
- callee = cgraph_get_node (target);
+ callee = cgraph_node::get (target);
/* Because may-edges are not explicitly represented and vtable may be external,
we may create the first reference to the object in the unit. */
ie->callee->order);
return NULL;
}
- callee = cgraph_get_create_node (target);
+ callee = cgraph_node::get_create (target);
}
if (!dbg_cnt (devirt))
|| TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
return NULL;
- return cgraph_get_node (TREE_OPERAND (cst, 0));
+ return cgraph_node::get (TREE_OPERAND (cst, 0));
}
ie->caller->name (), ie->caller->order);
}
tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
- cgraph_get_create_node (new_target);
+ cgraph_node::get_create (new_target);
return new_target;
}
if ((TREE_CODE (TREE_TYPE (target)) == FUNCTION_TYPE
&& DECL_FUNCTION_CODE (target) == BUILT_IN_UNREACHABLE)
|| !possible_polymorphic_call_target_p
- (ie, cgraph_get_node (target)))
+ (ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return ipa_make_edge_direct_to_target (ie, target);
}
if (target)
{
- if (!possible_polymorphic_call_target_p (ie, cgraph_get_node (target)))
+ if (!possible_polymorphic_call_target_p (ie, cgraph_node::get (target)))
target = ipa_impossible_devirt_target (ie, target);
return ipa_make_edge_direct_to_target (ie, target);
}
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
struct cgraph_node *node,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
struct ipa_edge_args *top;
struct cgraph_edge *ie, *next_ie, *new_direct_edge;
static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
struct cgraph_node *node,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
struct cgraph_edge *e;
bool res;
if (t && TREE_CODE (t) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
- && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
+ && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
&& (ref = new_root->find_reference (n, NULL, 0)))
{
if (dump_file)
gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (cst, 0))
== FUNCTION_DECL);
- n = cgraph_get_node (TREE_OPERAND (cst, 0));
+ n = cgraph_node::get (TREE_OPERAND (cst, 0));
if (n)
{
struct cgraph_node *clone;
bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
- vec<cgraph_edge_p> *new_edges)
+ vec<cgraph_edge *> *new_edges)
{
bool changed;
/* Do nothing if the preparation phase has not been carried out yet
static void
ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
ipa_analyze_node (node);
}
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
ipa_parm_adjustment_vec adjustments)
{
- struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
vec<tree> vargs;
vec<tree, va_gc> **debug_args = NULL;
gimple new_stmt;
cgraph_set_call_stmt (cs, new_stmt);
do
{
- ipa_record_stmt_references (current_node, gsi_stmt (gsi));
+ current_node->record_stmt_references (gsi_stmt (gsi));
gsi_prev (&gsi);
}
while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& IPA_NODE_REF (node) != NULL)
count++;
}
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& IPA_NODE_REF (node) != NULL)
ipa_write_node_info (ob, node);
}
index = streamer_read_uhwi (&ib_main);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
gcc_assert (node->definition);
ipa_read_node_info (&ib_main, node, data_in);
}
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& ipa_get_agg_replacements_for_node (node) != NULL)
count++;
}
lsei_next_function_in_partition (&lsei))
{
node = lsei_cgraph_node (lsei);
- if (cgraph_function_with_gimple_body_p (node)
+ if (node->has_gimple_body_p ()
&& ipa_get_agg_replacements_for_node (node) != NULL)
write_agg_replacement_chain (ob, node);
}
index = streamer_read_uhwi (&ib_main);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
gcc_assert (node->definition);
read_agg_replacement_chain (&ib_main, node, data_in);
}
/* Function formal parameters related computations. */
void ipa_initialize_node_params (struct cgraph_node *node);
bool ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
- vec<cgraph_edge_p> *new_edges);
+ vec<cgraph_edge *> *new_edges);
/* Indirect edge and binfo processing. */
tree ipa_get_indirect_edge_target (struct cgraph_edge *ie,
l->can_throw = false;
state_from_flags (&l->state_previously_known, &l->looping_previously_known,
flags_from_decl_or_type (fn->decl),
- cgraph_node_cannot_return (fn));
+ fn->cannot_return_p ());
if (fn->thunk.thunk_p || fn->alias)
{
static void
add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
- if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
+ if (node->get_availability () < AVAIL_INTERPOSABLE)
return;
/* There are some shared nodes, in particular the initializers on
static declarations. We do not need to scan them more than once
since all we would be interested in are the addressof
operations. */
visited_nodes = pointer_set_create ();
- if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
+ if (node->get_availability () > AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
pointer_set_destroy (visited_nodes);
visited_nodes = NULL;
/* Process all of the functions.
- We process AVAIL_OVERWRITABLE functions. We can not use the results
+ We process AVAIL_INTERPOSABLE functions. We can not use the results
by default, but the info can be used at LTO with -fwhole-program or
when function got cloned and the clone is AVAILABLE. */
FOR_EACH_DEFINED_FUNCTION (node)
- if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ if (node->get_availability () >= AVAIL_INTERPOSABLE)
set_function_state (node, analyze_function (node, true));
pointer_set_destroy (visited_nodes);
fs = XCNEW (struct funct_state_d);
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
+ index));
set_function_state (node, fs);
/* Note that the flags must be read in the opposite
{
struct cgraph_edge *e;
for (e = node->callees; e; e = e->next_callee)
- if (cgraph_function_node (e->callee, NULL) == node)
+ if (e->callee->function_symbol () == node)
return true;
return false;
}
order_pos = ipa_reduced_postorder (order, true, false, NULL);
if (dump_file)
{
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced", order, order_pos);
}
break;
/* For overwritable nodes we can not assume anything. */
- if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
+ if (w->get_availability () == AVAIL_INTERPOSABLE)
{
worse_state (&pure_const_state, &looping,
w_l->state_previously_known,
for (e = w->callees; e; e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->function_symbol (&avail);
enum pure_const_state_e edge_state = IPA_CONST;
bool edge_looping = false;
e->callee->name (),
e->callee->order);
}
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
if (dump_file && (dump_flags & TDF_DETAILS))
this_looping ? "looping " : "",
w->name ());
}
- cgraph_set_const_flag (w, true, this_looping);
+ w->set_const_flag (true, this_looping);
break;
case IPA_PURE:
this_looping ? "looping " : "",
w->name ());
}
- cgraph_set_pure_flag (w, true, this_looping);
+ w->set_pure_flag (true, this_looping);
break;
default:
order_pos = ipa_reduced_postorder (order, true, false, ignore_edge);
if (dump_file)
{
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_print_order (dump_file, "reduced for nothrow", order, order_pos);
}
funct_state w_l = get_function_state (w);
if (w_l->can_throw
- || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
+ || w->get_availability () == AVAIL_INTERPOSABLE)
can_throw = true;
if (can_throw)
for (e = w->callees; e; e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->function_symbol (&avail);
- if (avail > AVAIL_OVERWRITABLE)
+ if (avail > AVAIL_INTERPOSABLE)
{
funct_state y_l = get_function_state (y);
be different. */
if (!w->global.inlined_to)
{
- cgraph_set_nothrow_flag (w, true);
+ w->set_nothrow_flag (true);
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",
w->name ());
fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
return true;
}
- if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ if (node->get_availability () <= AVAIL_INTERPOSABLE)
{
if (dump_file)
fprintf (dump_file, "Function is not available or overwritable; not analyzing.\n");
bool skip;
struct cgraph_node *node;
- node = cgraph_get_node (current_function_decl);
+ node = cgraph_node::get (current_function_decl);
skip = skip_function_for_local_pure_const (node);
if (!warn_suggest_attribute_const
&& !warn_suggest_attribute_pure
warn_function_const (current_function_decl, !l->looping);
if (!skip)
{
- cgraph_set_const_flag (node, true, l->looping);
+ node->set_const_flag (true, l->looping);
changed = true;
}
if (dump_file)
{
if (!skip)
{
- cgraph_set_const_flag (node, true, false);
+ node->set_const_flag (true, false);
changed = true;
}
if (dump_file)
{
if (!skip)
{
- cgraph_set_pure_flag (node, true, l->looping);
+ node->set_pure_flag (true, l->looping);
changed = true;
}
warn_function_pure (current_function_decl, !l->looping);
{
if (!skip)
{
- cgraph_set_pure_flag (node, true, false);
+ node->set_pure_flag (true, false);
changed = true;
}
if (dump_file)
}
if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
{
- cgraph_set_nothrow_flag (node, true);
+ node->set_nothrow_flag (true);
changed = true;
if (dump_file)
fprintf (dump_file, "Function found to be nothrow: %s\n",
bool
ipa_ref::cannot_lead_to_return ()
{
- return cgraph_node_cannot_return (dyn_cast <cgraph_node *> (referring));
+ return dyn_cast <cgraph_node *> (referring)->cannot_return_p ();
}
/* Return reference list this reference is in. */
ipa_reference_get_not_read_global (struct cgraph_node *fn)
{
ipa_reference_optimization_summary_t info =
- get_reference_optimization_summary (cgraph_function_node (fn, NULL));
+ get_reference_optimization_summary (fn->function_symbol (NULL));
if (info)
return info->statics_not_read;
else if (flags_from_decl_or_type (fn->decl) & ECF_LEAF)
e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->function_symbol (&avail);
if (!y)
continue;
/* Only look into nodes we can propagate something. */
int flags = flags_from_decl_or_type (y->decl);
- if (avail > AVAIL_OVERWRITABLE
- || (avail == AVAIL_OVERWRITABLE && (flags & ECF_LEAF)))
+ if (avail > AVAIL_INTERPOSABLE
+ || (avail == AVAIL_INTERPOSABLE && (flags & ECF_LEAF)))
{
if (get_reference_vars_info (y))
{
}
}
- if (cgraph_node_cannot_return (fn))
+ if (fn->cannot_return_p ())
bitmap_clear (local->statics_written);
}
if (dump_file)
FOR_EACH_DEFINED_FUNCTION (node)
- if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ if (node->get_availability () >= AVAIL_INTERPOSABLE)
{
ipa_reference_local_vars_info_t l;
unsigned int index;
tree decl = node->decl;
int flags = flags_from_decl_or_type (decl);
if ((flags & ECF_LEAF)
- && cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ && node->get_availability () <= AVAIL_INTERPOSABLE)
;
else if (flags & ECF_CONST)
;
- else if ((flags & ECF_PURE)
- || cgraph_node_cannot_return (node))
+ else if ((flags & ECF_PURE) || node->cannot_return_p ())
{
read_all = true;
if (dump_file && (dump_flags & TDF_DETAILS))
struct cgraph_edge *e, *ie;
/* When function is overwritable, we can not assume anything. */
- if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ if (node->get_availability () <= AVAIL_INTERPOSABLE)
read_write_all_from_decl (node, read_all, write_all);
for (e = node->callees;
e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *callee = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *callee = e->callee->function_symbol (&avail);
gcc_checking_assert (callee);
- if (avail <= AVAIL_OVERWRITABLE)
+ if (avail <= AVAIL_INTERPOSABLE)
read_write_all_from_decl (callee, read_all, write_all);
}
int i;
if (dump_file)
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
ipa_discover_readonly_nonaddressable_vars ();
generate_summary ();
fprintf (dump_file, "Starting cycle with %s/%i\n",
node->asm_name (), node->order);
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* If any node in a cycle is read_all or write_all, they all are. */
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
read_all = union_static_var_sets (node_g->statics_read,
w_l->statics_read);
if (!(flags & ECF_PURE)
- && !cgraph_node_cannot_return (w))
+ && !w->cannot_return_p ())
write_all = union_static_var_sets (node_g->statics_written,
w_l->statics_written);
}
ipa_reference_vars_info_t node_info = get_reference_vars_info (node);
ipa_reference_global_vars_info_t node_g = &node_info->global;
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
node_info = get_reference_vars_info (node);
if (!node->alias
- && (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE
+ && (node->get_availability () > AVAIL_INTERPOSABLE
|| (flags_from_decl_or_type (node->decl) & ECF_LEAF)))
{
node_g = &node_info->global;
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref
+ (encoder, index));
info = XCNEW (struct ipa_reference_optimization_summary_d);
set_reference_optimization_summary (node, info);
info->statics_not_read = BITMAP_ALLOC (&optimization_summary_obstack);
bitmap args_to_skip;
tree parm;
int num = 0;
- struct cgraph_node *node, *cur_node = cgraph_get_node (current_function_decl);
+ cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
basic_block return_bb = find_return_bb ();
basic_block call_bb;
gimple_stmt_iterator gsi;
/* Now create the actual clone. */
rebuild_cgraph_edges ();
- node = cgraph_function_versioning (cur_node, vNULL,
- NULL,
- args_to_skip,
- !split_part_return_p,
- split_point->split_bbs,
- split_point->entry_bb, "part");
+ node = cur_node->create_version_clone_with_body
+ (vNULL, NULL, args_to_skip, !split_part_return_p, split_point->split_bbs,
+ split_point->entry_bb, "part");
/* Let's take a time profile for splitted function. */
node->tp_first_run = cur_node->tp_first_run + 1;
/* If the original function is declared inline, there is no point in issuing
a warning for the non-inlinable part. */
DECL_NO_INLINE_WARNING_P (node->decl) = 1;
- cgraph_node_remove_callees (cur_node);
+ cur_node->remove_callees ();
cur_node->remove_all_references ();
if (!split_part_return_p)
TREE_THIS_VOLATILE (node->decl) = 1;
basic_block bb;
int overall_time = 0, overall_size = 0;
int todo = 0;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
if (flags_from_decl_or_type (current_function_decl)
& (ECF_NORETURN|ECF_MALLOC))
fprintf (out, "\n\n ordered call graph: %s\n", note);
for (i = count - 1; i >= 0; i--)
- dump_cgraph_node (out, order[i]);
+ order[i]->dump (out);
fprintf (out, "\n");
fflush (out);
}
-\f
+
struct searchc_env {
struct cgraph_node **stack;
int stack_size;
{
struct ipa_dfs_info * w_info;
enum availability avail;
- struct cgraph_node *w = cgraph_function_or_thunk_node (edge->callee, &avail);
+ struct cgraph_node *w = edge->callee->ultimate_alias_target (&avail);
if (!w || (ignore_edge && ignore_edge (edge)))
continue;
if (w->aux
- && (avail > AVAIL_OVERWRITABLE
- || (env->allow_overwritable && avail == AVAIL_OVERWRITABLE)))
+ && (avail > AVAIL_INTERPOSABLE
+ || (env->allow_overwritable && avail == AVAIL_INTERPOSABLE)))
{
w_info = (struct ipa_dfs_info *) w->aux;
if (w_info->new_node)
FOR_EACH_DEFINED_FUNCTION (node)
{
- enum availability avail = cgraph_function_body_availability (node);
+ enum availability avail = node->get_availability ();
- if (avail > AVAIL_OVERWRITABLE
+ if (avail > AVAIL_INTERPOSABLE
|| (allow_overwritable
- && (avail == AVAIL_OVERWRITABLE)))
+ && (avail == AVAIL_INTERPOSABLE)))
{
/* Reuse the info if it is already there. */
struct ipa_dfs_info *info = (struct ipa_dfs_info *) node->aux;
/* Get the set of nodes for the cycle in the reduced call graph starting
from NODE. */
-vec<cgraph_node_ptr>
+vec<cgraph_node *>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
- vec<cgraph_node_ptr> v = vNULL;
+ vec<cgraph_node *> v = vNULL;
struct ipa_dfs_info *node_dfs_info;
while (node)
{
{
struct ipa_dfs_info *caller_dfs = (struct ipa_dfs_info *) cs->caller->aux;
struct ipa_dfs_info *callee_dfs;
- struct cgraph_node *callee = cgraph_function_node (cs->callee, NULL);
+ struct cgraph_node *callee = cs->callee->function_symbol ();
callee_dfs = (struct ipa_dfs_info *) callee->aux;
return (caller_dfs
|| (!node->address_taken
&& !node->global.inlined_to
&& !node->alias && !node->thunk.thunk_p
- && !cgraph_only_called_directly_p (node))))
+ && !node->only_called_directly_p ())))
{
stack_size = 0;
stack[stack_size].node = node;
functions to non-always-inline functions. */
if (DECL_DISREGARD_INLINE_LIMITS (edge->caller->decl)
&& !DECL_DISREGARD_INLINE_LIMITS
- (cgraph_function_node (edge->callee, NULL)->decl))
+ (edge->callee->function_symbol ()->decl))
node2 = NULL;
}
for (; stack[stack_size].node->iterate_referring (
gcc_assert (!*slot);
*slot = state;
}
- cgraph_get_body (src);
- cgraph_get_body (dst);
+ src->get_body ();
+ dst->get_body ();
srccfun = DECL_STRUCT_FUNCTION (src->decl);
dstcfun = DECL_STRUCT_FUNCTION (dst->decl);
if (n_basic_blocks_for_fn (srccfun)
(dst->decl,
gimple_bb (e->call_stmt));
}
- cgraph_release_function_body (src);
+ src->release_body ();
inline_update_overall_summary (dst);
}
/* TODO: if there is no match, we can scale up. */
bool
recursive_call_p (tree func, tree dest)
{
- struct cgraph_node *dest_node = cgraph_get_create_node (dest);
- struct cgraph_node *cnode = cgraph_get_create_node (func);
+ struct cgraph_node *dest_node = cgraph_node::get_create (dest);
+ struct cgraph_node *cnode = cgraph_node::get_create (func);
- return symtab_semantically_equivalent_p (dest_node,
- cnode);
+ return dest_node->semantically_equivalent_p (cnode);
}
int ipa_reduced_postorder (struct cgraph_node **, bool, bool,
bool (*ignore_edge) (struct cgraph_edge *));
void ipa_free_postorder_info (void);
-vec<cgraph_node_ptr> ipa_get_nodes_in_cycle (struct cgraph_node *);
+vec<cgraph_node *> ipa_get_nodes_in_cycle (struct cgraph_node *);
bool ipa_edge_within_scc (struct cgraph_edge *);
int ipa_reverse_postorder (struct cgraph_node **);
tree get_base_var (tree);
/* Return true when NODE can not be local. Worker for cgraph_local_node_p. */
-static bool
-cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+bool
+cgraph_node::non_local_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
/* FIXME: Aliases can be local, but i386 gets thunks wrong then. */
- return !(cgraph_only_called_directly_or_aliased_p (node)
+ return !(node->only_called_directly_or_aliased_p ()
&& !node->has_aliases_p ()
&& node->definition
&& !DECL_EXTERNAL (node->decl)
/* Return true when function can be marked local. */
bool
-cgraph_local_node_p (struct cgraph_node *node)
+cgraph_node::local_p (void)
{
- struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);
+ cgraph_node *n = ultimate_alias_target ();
/* FIXME: thunks can be considered local, but we need to prevent i386
from attempting to change the calling convention of them. */
if (n->thunk.thunk_p)
return false;
- return !cgraph_for_node_and_aliases (n,
- cgraph_non_local_node_p_1, NULL, true);
+ return !n->call_for_symbol_thunks_and_aliases (cgraph_node::non_local_p,
+ NULL, true);
}
/* Return true when there is a reference to node and it is not vtable. */
+
bool
-address_taken_from_non_vtable_p (symtab_node *node)
+symtab_node::address_taken_from_non_vtable_p (void)
{
int i;
struct ipa_ref *ref = NULL;
- for (i = 0; node->iterate_referring (i, ref); i++)
+ for (i = 0; iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ADDR)
{
varpool_node *node;
&& (TREE_CODE (node->decl) != FUNCTION_DECL
|| (!DECL_CXX_CONSTRUCTOR_P (node->decl)
&& !DECL_CXX_DESTRUCTOR_P (node->decl)))
- && address_taken_from_non_vtable_p (node))
+ && node->address_taken_from_non_vtable_p ())
return false;
/* If the symbol is used in some weird way, better to not touch it. */
return true;
/* If linker counts on us, we must preserve the function. */
- if (symtab_used_from_object_file_p (node))
+ if (node->used_from_object_file_p ())
return true;
if (DECL_PRESERVE_P (node->decl))
return true;
return false;
/* If linker counts on us, we must preserve the function. */
- if (symtab_used_from_object_file_p (vnode))
+ if (vnode->used_from_object_file_p ())
return true;
if (DECL_HARD_REGISTER (vnode->decl))
Even if the linker claims the symbol is unused, never bring internal
symbols that are declared by user as used or externally visible.
This is needed for i.e. references from asm statements. */
- if (symtab_used_from_object_file_p (vnode))
+ if (vnode->used_from_object_file_p ())
return true;
if (vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
return false;
bool
can_replace_by_local_alias (symtab_node *node)
{
- return (symtab_node_availability (node) > AVAIL_OVERWRITABLE
+ return (node->get_availability () > AVAIL_INTERPOSABLE
&& !decl_binds_to_current_def_p (node->decl)
- && !symtab_can_be_discarded (node));
+ && !node->can_be_discarded_p ());
}
/* Return true if we can replace reference to NODE by local alias
if (TREE_CODE (*tp) == VAR_DECL
|| TREE_CODE (*tp) == FUNCTION_DECL)
{
- if (can_replace_by_local_alias_in_vtable (symtab_get_node (*tp)))
- *tp = symtab_nonoverwritable_alias (symtab_get_node (*tp))->decl;
+ if (can_replace_by_local_alias_in_vtable (symtab_node::get (*tp)))
+ *tp = symtab_node::get (*tp)->noninterposable_alias ()->decl;
*walk_subtrees = 0;
}
else if (IS_TYPE_OR_DECL_P (*tp))
DECL_WEAK (node->decl) = false;
if (!define)
DECL_EXTERNAL (node->decl) = true;
- symtab_dissolve_same_comdat_group_list (node);
+ node->dissolve_same_comdat_group_list ();
}
/* Decide on visibility of all symbols. */
all of them have to be, otherwise it is a front-end bug. */
gcc_assert (DECL_EXTERNAL (n->decl));
#endif
- symtab_dissolve_same_comdat_group_list (node);
+ node->dissolve_same_comdat_group_list ();
}
gcc_assert ((!DECL_WEAK (node->decl)
&& !DECL_COMDAT (node->decl))
next->set_comdat_group (NULL);
if (!next->alias)
next->set_section (NULL);
- symtab_make_decl_local (next->decl);
+ next->make_decl_local ();
next->unique_name = ((next->resolution == LDPR_PREVAILING_DEF_IRONLY
|| next->unique_name
|| next->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
in the group and they will all be made local. We need to
dissolve the group at once so that the predicate does not
segfault though. */
- symtab_dissolve_same_comdat_group_list (node);
+ node->dissolve_same_comdat_group_list ();
}
if (TREE_PUBLIC (node->decl))
node->set_comdat_group (NULL);
if (DECL_COMDAT (node->decl) && !node->alias)
node->set_section (NULL);
- symtab_make_decl_local (node->decl);
+ node->make_decl_local ();
}
if (node->thunk.thunk_p
{
struct cgraph_node *decl_node = node;
- decl_node = cgraph_function_node (decl_node->callees->callee, NULL);
+ decl_node = decl_node->callees->callee->function_symbol ();
/* Thunks have the same visibility as function they are attached to.
Make sure the C++ front end set this up properly. */
{
gcc_checking_assert (DECL_COMDAT (node->decl)
== DECL_COMDAT (decl_node->decl));
- gcc_checking_assert (symtab_in_same_comdat_p (node, decl_node));
+ gcc_checking_assert (node->in_same_comdat_group_p (decl_node));
gcc_checking_assert (node->same_comdat_group);
}
node->forced_by_abi = decl_node->forced_by_abi;
}
FOR_EACH_DEFINED_FUNCTION (node)
{
- node->local.local |= cgraph_local_node_p (node);
+ node->local.local |= node->local_p ();
/* If we know that function can not be overwritten by a different semantics
and moreover its section can not be discarded, replace all direct calls
- by calls to an nonoverwritable alias. This make dynamic linking
+ by calls to a noninterposable alias. This makes dynamic linking
cheaper and enable more optimization.
TODO: We can also update virtual tables. */
if (node->callers
&& can_replace_by_local_alias (node))
{
- struct cgraph_node *alias = cgraph (symtab_nonoverwritable_alias (node));
+ cgraph_node *alias = dyn_cast <cgraph_node *>
+ (node->noninterposable_alias ());
if (alias && alias != node)
{
next->set_comdat_group (NULL);
if (!next->alias)
next->set_section (NULL);
- symtab_make_decl_local (next->decl);
+ next->make_decl_local ();
next->unique_name = ((next->resolution == LDPR_PREVAILING_DEF_IRONLY
|| next->unique_name
|| next->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
&& TREE_PUBLIC (next->decl));
}
- symtab_dissolve_same_comdat_group_list (vnode);
+ vnode->dissolve_same_comdat_group_list ();
}
if (TREE_PUBLIC (vnode->decl))
vnode->set_comdat_group (NULL);
if (DECL_COMDAT (vnode->decl) && !vnode->alias)
vnode->set_section (NULL);
- symtab_make_decl_local (vnode->decl);
+ vnode->make_decl_local ();
vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
}
update_visibility_by_resolution_info (vnode);
if (targets.length () == 1)
target = targets[0];
else
- target = cgraph_get_create_node
+ target = cgraph_node::get_create
(builtin_decl_implicit (BUILT_IN_UNREACHABLE));
if (dump_enabled_p ())
{
- location_t locus = gimple_location_safe (edge->call_stmt);
- dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
+ location_t locus = gimple_location (edge->call_stmt);
+ dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
"devirtualizing call in %s/%i to %s/%i\n",
edge->caller->name (), edge->caller->order,
target->name (),
if (node->definition
&& !node->global.inlined_to
&& !node->in_other_partition
- && !cgraph_can_remove_if_no_direct_calls_and_refs_p (node))
+ && !node->can_remove_if_no_direct_calls_and_refs_p ())
{
gcc_assert (!node->global.inlined_to);
pointer_set_insert (reachable, node);
&& DECL_ABSTRACT_ORIGIN (node->decl))
{
struct cgraph_node *origin_node
- = cgraph_get_create_node (DECL_ABSTRACT_ORIGIN (node->decl));
+ = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (node->decl));
origin_node->used_as_abstract_origin = true;
enqueue_node (origin_node, &first, reachable);
}
for (next = node->same_comdat_group;
next != node;
next = next->same_comdat_group)
- if (!symtab_comdat_local_p (next)
+ if (!next->comdat_local_p ()
&& !pointer_set_insert (reachable, next))
enqueue_node (next, &first, reachable);
}
if (DECL_EXTERNAL (e->callee->decl)
&& e->callee->alias
&& before_inlining_p)
- {
- pointer_set_insert (reachable,
- cgraph_function_node (e->callee));
- }
+ pointer_set_insert (reachable,
+ e->callee->function_symbol ());
pointer_set_insert (reachable, e->callee);
}
enqueue_node (e->callee, &first, reachable);
{
if (file)
fprintf (file, " %s/%i", node->name (), node->order);
- cgraph_remove_node (node);
+ node->remove ();
changed = true;
}
/* If node is unreachable, remove its body. */
else if (!pointer_set_contains (reachable, node))
{
if (!pointer_set_contains (body_needed_for_clonning, node->decl))
- cgraph_release_function_body (node);
+ node->release_body ();
else if (!node->clone_of)
gcc_assert (in_lto_p || DECL_RESULT (node->decl));
if (node->definition)
DECL_ATTRIBUTES (node->decl));
if (!node->in_other_partition)
node->local.local = false;
- cgraph_node_remove_callees (node);
- symtab_remove_from_same_comdat_group (node);
+ node->remove_callees ();
+ node->remove_from_same_comdat_group ();
node->remove_all_references ();
changed = true;
}
}
else
- gcc_assert (node->clone_of || !cgraph_function_with_gimple_body_p (node)
+ gcc_assert (node->clone_of || !node->has_gimple_body_p ()
|| in_lto_p || DECL_RESULT (node->decl));
}
{
if (file)
fprintf (file, " %s/%i", vnode->name (), vnode->order);
- varpool_remove_node (vnode);
+ vnode->remove ();
changed = true;
}
else if (!pointer_set_contains (reachable, vnode))
vnode->analyzed = false;
vnode->aux = NULL;
- symtab_remove_from_same_comdat_group (vnode);
+ vnode->remove_from_same_comdat_group ();
/* Keep body if it may be useful for constant folding. */
if ((init = ctor_for_folding (vnode->decl)) == error_mark_node)
if (node->address_taken
&& !node->used_from_other_partition)
{
- if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
+ if (!node->call_for_symbol_thunks_and_aliases
+ (has_addr_references_p, NULL, true))
{
if (file)
fprintf (file, " %s", node->name ());
node->address_taken = false;
changed = true;
- if (cgraph_local_node_p (node))
+ if (node->local_p ())
{
node->local.local = true;
if (file)
fprintf (file, "\n");
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
/* If we removed something, perhaps profile could be improved. */
*written = true;
break;
case IPA_REF_ALIAS:
- process_references (varpool (ref->referring), written, address_taken,
- read, explicit_refs);
+ process_references (dyn_cast <varpool_node *> (ref->referring), written,
+ address_taken, read, explicit_refs);
break;
}
}
gimplify_function_tree (decl);
- cgraph_add_new_function (decl, false);
+ cgraph_node::add_new_function (decl, false);
set_cfun (NULL);
current_function_decl = NULL;
static_ctors.safe_push (node->decl);
if (DECL_STATIC_DESTRUCTOR (node->decl))
static_dtors.safe_push (node->decl);
- node = cgraph_get_node (node->decl);
+ node = cgraph_node::get (node->decl);
DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}
function = meet (function, varpool_alias_target (vnode), single_user_map);
/* Check all users and see if they correspond to a single function. */
- for (i = 0;
- vnode->iterate_referring (i, ref)
- && function != BOTTOM; i++)
+ for (i = 0; vnode->iterate_referring (i, ref) && function != BOTTOM; i++)
{
struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
if (cnode)
single_user_map.put (var, user);
/* Enqueue all aliases for re-processing. */
- for (i = 0;
- var->iterate_referring (i, ref); i++)
+ for (i = 0; var->iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ALIAS
&& !ref->referring->aux)
{
first = dyn_cast <varpool_node *> (ref->referring);
}
/* Enqueue all users for re-processing. */
- for (i = 0;
- var->iterate_reference (i, ref); i++)
+ for (i = 0; var->iterate_reference (i, ref); i++)
if (!ref->referred->aux
&& ref->referred->definition
&& is_a <varpool_node *> (ref->referred))
/* Double check that we didn't pass the function to the callgraph early. */
if (TREE_CODE (decl) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
gcc_assert (!node || !node->definition);
}
#endif
translation units into SET during IPA-inlining. We make them as
local static nodes to prevent clashes with other local statics. */
if (boundary_p && node->analyzed
- && symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
+ && node->get_partitioning_class () == SYMBOL_PARTITION)
{
/* Inline clones can not be part of boundary.
gcc_assert (!node->global.inlined_to);
bp_pack_value (&bp, node->implicit_section, 1);
bp_pack_value (&bp, node->address_taken, 1);
bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
- && symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION
+ && node->get_partitioning_class () == SYMBOL_PARTITION
&& (reachable_from_other_partition_p (node, encoder)
|| referenced_from_other_partition_p (node, encoder)), 1);
bp_pack_value (&bp, node->lowered, 1);
/* Constant pool initializers can be de-unified into individual ltrans units.
FIXME: Alternatively at -Os we may want to avoid generating for them the local
labels and share them across LTRANS partitions. */
- if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
+ if (node->get_partitioning_class () != SYMBOL_PARTITION)
{
bp_pack_value (&bp, 0, 1); /* used_from_other_parition. */
bp_pack_value (&bp, 0, 1); /* in_other_partition. */
if (DECL_ABSTRACT_ORIGIN (node->decl))
{
struct cgraph_node *origin_node
- = cgraph_get_node (DECL_ABSTRACT_ORIGIN (node->decl));
+ = cgraph_node::get (DECL_ABSTRACT_ORIGIN (node->decl));
add_node_to (encoder, origin_node, true);
}
}
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
lto_output_node (ob, cnode, encoder);
else
- lto_output_varpool_node (ob, varpool (node), encoder);
-
+ lto_output_varpool_node (ob, dyn_cast <varpool_node *> (node), encoder);
}
/* Go over the nodes in SET again to write edges. */
if (clone_ref != LCC_NOT_FOUND)
{
- node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
- 0, CGRAPH_FREQ_BASE, false,
- vNULL, false, NULL, NULL);
+ node = dyn_cast <cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
+ 0, CGRAPH_FREQ_BASE, false,
+ vNULL, false, NULL, NULL);
}
else
{
/* Declaration of functions can be already merged with a declaration
from other input file. We keep cgraph unmerged until after streaming
of ipa passes is done. Alays forcingly create a fresh node. */
- node = cgraph_create_empty_node ();
+ node = cgraph_node::create_empty ();
node->decl = fn_decl;
- symtab_register_node (node);
+ node->register_symbol ();
}
node->order = order;
input_overwrite_node (file_data, node, tag, &bp);
/* Store a reference for now, and fix up later to be a pointer. */
- node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
+ node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
if (group)
{
of ipa passes is done. Alays forcingly create a fresh node. */
node = varpool_create_empty_node ();
node->decl = var_decl;
- symtab_register_node (node);
+ node->register_symbol ();
node->order = order;
if (order >= symtab_order)
struct bitpack_d bp;
int ecf_flags = 0;
- caller = cgraph (nodes[streamer_read_hwi (ib)]);
+ caller = dyn_cast <cgraph_node *> (nodes[streamer_read_hwi (ib)]);
if (caller == NULL || caller->decl == NULL_TREE)
internal_error ("bytecode stream: no caller found while reading edge");
if (!indirect)
{
- callee = cgraph (nodes[streamer_read_hwi (ib)]);
+ callee = dyn_cast <cgraph_node *> (nodes[streamer_read_hwi (ib)]);
if (callee == NULL || callee->decl == NULL_TREE)
internal_error ("bytecode stream: no callee found while reading edge");
}
freq = (int) bp_unpack_var_len_unsigned (&bp);
if (indirect)
- edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
+ edge = caller->create_indirect_edge (NULL, 0, count, freq);
else
- edge = cgraph_create_edge (caller, callee, NULL, count, freq);
+ edge = caller->create_edge (callee, NULL, count, freq);
edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
edge->speculative = bp_unpack_value (&bp, 1);
/* Fixup inlined_to from reference to pointer. */
if (ref != LCC_NOT_FOUND)
- cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
+ dyn_cast <cgraph_node *> (node)->global.inlined_to
+ = dyn_cast <cgraph_node *> (nodes[ref]);
else
cnode->global.inlined_to = NULL;
}
for (i = 0; i < count; i++)
{
int ref = streamer_read_uhwi (&ib_main);
- input_node_opt_summary (cgraph (nodes[ref]),
+ input_node_opt_summary (dyn_cast <cgraph_node *> (nodes[ref]),
&ib_main, data_in);
}
lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
gimple_register_cfg_hooks ();
- node = cgraph_get_node (fn_decl);
+ node = cgraph_node::get (fn_decl);
if (!node)
- node = cgraph_create_node (fn_decl);
+ node = cgraph_node::create (fn_decl);
input_struct_function_base (fn, data_in, ib);
input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);
gcc_assert (alias || TREE_CODE (t) != VAR_DECL
|| varpool_get_node (t)->definition);
gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
- || (cgraph_get_node (t)
- && cgraph_get_node (t)->definition));
+ || (cgraph_node::get (t)
+ && cgraph_node::get (t)->definition));
}
/* Imitate what default_elf_asm_output_external do.
output_symbol_p (symtab_node *node)
{
struct cgraph_node *cnode;
- if (!symtab_real_symbol_p (node))
+ if (!node->real_symbol_p ())
return false;
/* We keep external functions in symtab for sake of inlining
and devirtualization. We do not want to see them in symbol table as
static inline struct cgraph_node *
lsei_cgraph_node (lto_symtab_encoder_iterator lsei)
{
- return cgraph (lsei.encoder->nodes[lsei.index].node);
+ return dyn_cast <cgraph_node *> (lsei.encoder->nodes[lsei.index].node);
}
/* Return the node pointed to by LSI. */
static inline varpool_node *
lsei_varpool_node (lto_symtab_encoder_iterator lsei)
{
- return varpool (lsei.encoder->nodes[lsei.index].node);
+ return dyn_cast <varpool_node *> (lsei.encoder->nodes[lsei.index].node);
}
/* Return the cgraph node corresponding to REF using ENCODER. */
/* Add all duplicated references to the partition. */
for (i = 0; node->iterate_reference (i, ref); i++)
- if (symtab_get_symbol_partitioning_class (ref->referred) == SYMBOL_DUPLICATE)
+ if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
add_symbol_to_partition (part, ref->referred);
/* References to a readonly variable may be constant foled into its value.
Recursively look into the initializers of the constant variable and add
references, too. */
else if (is_a <varpool_node *> (ref->referred)
- && varpool_ctor_useable_for_folding_p (varpool (ref->referred))
+ && varpool_ctor_useable_for_folding_p
+ (dyn_cast <varpool_node *> (ref->referred))
&& !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
{
if (!part->initializers_visited)
static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
- enum symbol_partitioning_class c = symtab_get_symbol_partitioning_class (node);
+ enum symbol_partitioning_class c = node->get_partitioning_class ();
struct ipa_ref *ref;
symtab_node *node1;
for (e = cnode->callees; e; e = e->next_callee)
if (!e->inline_failed)
add_symbol_to_partition_1 (part, e->callee);
- else if (symtab_get_symbol_partitioning_class (e->callee) == SYMBOL_DUPLICATE)
+ else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
add_symbol_to_partition (part, e->callee);
/* Add all thunks associated with the function. */
return node;
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
{
- cnode = cgraph_function_node (cnode, NULL);
+ cnode = cnode->function_symbol ();
if (cnode->global.inlined_to)
cnode = cnode->global.inlined_to;
return cnode;
symtab_node *node1;
/* Verify that we do not try to duplicate something that can not be. */
- gcc_checking_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
+ gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
|| !symbol_partitioned_p (node));
while ((node1 = contained_in_symbol (node)) != node)
Be lax about comdats; they may or may not be duplicated and we may
end up in need to duplicate keyed comdat because it has unkeyed alias. */
- gcc_assert (symtab_get_symbol_partitioning_class (node) == SYMBOL_DUPLICATE
+ gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
|| DECL_COMDAT (node->decl)
|| !symbol_partitioned_p (node));
FOR_EACH_SYMBOL (node)
{
- if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
+ if (node->get_partitioning_class () != SYMBOL_PARTITION
|| symbol_partitioned_p (node))
continue;
FOR_EACH_SYMBOL (node)
{
- if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION
+ if (node->get_partitioning_class () != SYMBOL_PARTITION
|| symbol_partitioned_p (node))
continue;
partition = new_partition (node->asm_name ());
gcc_assert (!vnode->aux);
FOR_EACH_DEFINED_FUNCTION (node)
- if (symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
+ if (node->get_partitioning_class () == SYMBOL_PARTITION)
{
order[n_nodes++] = node;
if (!node->alias)
if (!flag_toplevel_reorder)
{
FOR_EACH_VARIABLE (vnode)
- if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
n_varpool_nodes++;
varpool_order = XNEWVEC (varpool_node *, n_varpool_nodes);
n_varpool_nodes = 0;
FOR_EACH_VARIABLE (vnode)
- if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ if (vnode->get_partitioning_class () == SYMBOL_PARTITION)
varpool_order[n_varpool_nodes++] = vnode;
qsort (varpool_order, n_varpool_nodes, sizeof (varpool_node *),
varpool_node_cmp);
if (!vnode->definition)
continue;
if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
- && symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ && vnode->get_partitioning_class () == SYMBOL_PARTITION)
add_symbol_to_partition (partition, vnode);
index = lto_symtab_encoder_lookup (partition->encoder,
vnode);
number of symbols promoted to hidden. */
if (!symbol_partitioned_p (vnode) && flag_toplevel_reorder
&& !varpool_can_remove_if_no_refs (vnode)
- && symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION)
+ && vnode->get_partitioning_class () == SYMBOL_PARTITION)
add_symbol_to_partition (partition, vnode);
index = lto_symtab_encoder_lookup (partition->encoder,
vnode);
if (flag_toplevel_reorder)
{
FOR_EACH_VARIABLE (vnode)
- if (symtab_get_symbol_partitioning_class (vnode) == SYMBOL_PARTITION
+ if (vnode->get_partitioning_class () == SYMBOL_PARTITION
&& !symbol_partitioned_p (vnode))
add_symbol_to_partition (partition, vnode);
}
struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
if (!cnode)
return false;
- if (symtab_real_symbol_p (node))
+ if (node->real_symbol_p ())
return false;
return (!encoder
|| (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
external symbols (i.e. those not defined). Remove this test
once this is fixed. */
|| DECL_EXTERNAL (node->decl)
- || !symtab_real_symbol_p (node))
+ || !node->real_symbol_p ())
&& !may_need_named_section_p (encoder, node))
return;
same name as external or public symbol.) */
for (s = symtab_node_for_asm (name);
s; s = s->next_sharing_asm_name)
- if ((symtab_real_symbol_p (s) || may_need_named_section_p (encoder, s))
+ if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
&& s->decl != node->decl
&& (!encoder
|| lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
mangled name. */
for (s = symtab_node_for_asm (name); s;)
if (!s->externally_visible
- && ((symtab_real_symbol_p (s)
+ && ((s->real_symbol_p ()
&& !DECL_EXTERNAL (node->decl)
&& !TREE_PUBLIC (node->decl))
|| may_need_named_section_p (encoder, s))
|| lto_symtab_encoder_in_partition_p (encoder, node)
/* ... or if we do not partition it. This mean that it will
appear in every partition refernecing it. */
- || symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
+ || node->get_partitioning_class () != SYMBOL_PARTITION)
continue;
promote_symbol (node);
/* Merge node flags. */
if (node->force_output)
- cgraph_mark_force_output_node (prevailing_node);
+ prevailing_node->mark_force_output ();
if (node->forced_by_abi)
prevailing_node->forced_by_abi = true;
if (node->address_taken)
{
gcc_assert (!prevailing_node->global.inlined_to);
- cgraph_mark_address_taken_node (prevailing_node);
+ prevailing_node->mark_address_taken ();
}
/* Redirect all incoming edges. */
lto_free_function_in_decl_state_for_node (node);
if (node->decl != prevailing_node->decl)
- cgraph_release_function_body (node);
+ node->release_body ();
/* Finally remove the replaced node. */
- cgraph_remove_node (node);
+ node->remove ();
}
/* Replace the cgraph node NODE with PREVAILING_NODE in the cgraph, merging
tls_model_names [prevailing_node->tls_model]);
}
/* Finally remove the replaced node. */
- varpool_remove_node (vnode);
+ vnode->remove ();
}
/* Merge two variable or function symbol table entries PREVAILING and ENTRY.
{
if (!TREE_PUBLIC (e->decl) && !DECL_EXTERNAL (e->decl))
return false;
- return symtab_real_symbol_p (e);
+ return e->real_symbol_p ();
}
/* Return true if the symtab entry E can be the prevailing one. */
first->asm_name ());
for (e = first; e; e = e->next_sharing_asm_name)
if (TREE_PUBLIC (e->decl))
- dump_symtab_node (cgraph_dump_file, e);
+ e->dump (cgraph_dump_file);
}
/* Compute the symbol resolutions. This is a no-op when using the
{
fprintf (cgraph_dump_file, "After resolution:\n");
for (e = prevailing; e; e = e->next_sharing_asm_name)
- dump_symtab_node (cgraph_dump_file, e);
+ e->dump (cgraph_dump_file);
}
}
continue;
cgraph_node *ce = dyn_cast <cgraph_node *> (e);
if (ce && !DECL_BUILT_IN (e->decl))
- lto_cgraph_replace_node (ce, cgraph (prevailing));
+ lto_cgraph_replace_node (ce, dyn_cast <cgraph_node *> (prevailing));
if (varpool_node *ve = dyn_cast <varpool_node *> (e))
- lto_varpool_replace_node (ve, varpool (prevailing));
+ lto_varpool_replace_node (ve, dyn_cast <varpool_node *> (prevailing));
}
return;
symtab_node *tgt = symtab_node_for_asm (node->alias_target);
gcc_assert (node->weakref);
if (tgt)
- symtab_resolve_alias (node, tgt);
+ node->resolve_alias (tgt);
}
node->aux = NULL;
possible that tree merging unified the declaration. We
do not want duplicate entries in symbol table. */
if (cnode && DECL_BUILT_IN (node->decl)
- && (cnode2 = cgraph_get_node (node->decl))
+ && (cnode2 = cgraph_node::get (node->decl))
&& cnode2 != cnode)
lto_cgraph_replace_node (cnode2, cnode);
nodes if tree merging occured. */
if ((vnode = dyn_cast <varpool_node *> (node))
&& DECL_HARD_REGISTER (vnode->decl)
- && (node2 = symtab_get_node (vnode->decl))
+ && (node2 = symtab_node::get (vnode->decl))
&& node2 != node)
lto_varpool_replace_node (dyn_cast <varpool_node *> (node2),
vnode);
/* Abstract functions may have duplicated cgraph nodes attached;
remove them. */
else if (cnode && DECL_ABSTRACT (cnode->decl)
- && (cnode2 = cgraph_get_node (node->decl))
+ && (cnode2 = cgraph_node::get (node->decl))
&& cnode2 != cnode)
- cgraph_remove_node (cnode2);
+ cnode2->remove ();
node->decl->decl_with_vis.symtab_node = node;
}
decl = node->decl;
/* Read in functions with body (analyzed nodes)
and also functions that are needed to produce virtual clones. */
- if ((cgraph_function_with_gimple_body_p (node) && node->analyzed)
+ if ((node->has_gimple_body_p () && node->analyzed)
|| node->used_as_abstract_origin
|| has_analyzed_clone_p (node))
{
/* Store resolutions into the symbol table. */
FOR_EACH_SYMBOL (snode)
- if (symtab_real_symbol_p (snode)
+ if (snode->real_symbol_p ()
&& snode->lto_file_data
&& snode->lto_file_data->resolution_map
&& (res = pointer_map_contains (snode->lto_file_data->resolution_map,
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Before merging:\n");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
lto_symtab_merge_symbols ();
/* Removal of unreacable symbols is needed to make verify_symtab to pass;
cgraph_function_flags_ready = true;
if (cgraph_dump_file)
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
bitmap_obstack_initialize (NULL);
cgraph_state = CGRAPH_STATE_IPA_SSA;
if (cgraph_dump_file)
{
fprintf (cgraph_dump_file, "Optimized ");
- dump_symtab (cgraph_dump_file);
+ symtab_node::dump_table (cgraph_dump_file);
}
#ifdef ENABLE_CHECKING
- verify_symtab ();
+ symtab_node::verify_symtab_nodes ();
#endif
bitmap_obstack_release (NULL);
chain = CLASS_CLS_METHODS (impent->imp_context);
while (chain)
{
- cgraph_mark_force_output_node (
- cgraph_get_create_node (METHOD_DEFINITION (chain)));
+ cgraph_node::get_create (METHOD_DEFINITION (chain))->mark_force_output ();
chain = DECL_CHAIN (chain);
}
chain = CLASS_NST_METHODS (impent->imp_context);
while (chain)
{
- cgraph_mark_force_output_node (
- cgraph_get_create_node (METHOD_DEFINITION (chain)));
+ cgraph_node::get_create (METHOD_DEFINITION (chain))->mark_force_output ();
chain = DECL_CHAIN (chain);
}
}
{
ctx->cb.src_fn = current_function_decl;
ctx->cb.dst_fn = current_function_decl;
- ctx->cb.src_node = cgraph_get_node (current_function_decl);
+ ctx->cb.src_node = cgraph_node::get (current_function_decl);
gcc_checking_assert (ctx->cb.src_node);
ctx->cb.dst_node = ctx->cb.src_node;
ctx->cb.src_cfun = cfun;
pop_cfun ();
/* Inform the callgraph about the new function. */
- cgraph_add_new_function (child_fn, false);
+ cgraph_node::add_new_function (child_fn, false);
}
/* Destroy a omp_context data structures. Called through the splay tree
/* Inform the callgraph about the new function. */
DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
- cgraph_add_new_function (child_fn, true);
+ cgraph_node::add_new_function (child_fn, true);
/* Fix the callgraph edges for child_cfun. Those for cfun will be
fixed in a following pass. */
/* Inform the callgraph about the new function. */
DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
- cgraph_add_new_function (child_fn, true);
+ cgraph_node::add_new_function (child_fn, true);
/* Fix the callgraph edges for child_cfun. Those for cfun will be
fixed in a following pass. */
memset (&tcctx, '\0', sizeof (tcctx));
tcctx.cb.src_fn = ctx->cb.src_fn;
tcctx.cb.dst_fn = child_fn;
- tcctx.cb.src_node = cgraph_get_node (tcctx.cb.src_fn);
+ tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
gcc_checking_assert (tcctx.cb.src_node);
tcctx.cb.dst_node = tcctx.cb.src_node;
tcctx.cb.src_cfun = ctx->cb.src_cfun;
struct cgraph_node *new_node;
if (old_node->definition)
{
- if (!cgraph_function_with_gimple_body_p (old_node))
+ if (!old_node->has_gimple_body_p ())
return NULL;
- cgraph_get_body (old_node);
- new_node = cgraph_function_versioning (old_node, vNULL, NULL, NULL,
- false, NULL, NULL, "simdclone");
+ old_node->get_body ();
+ new_node = old_node->create_version_clone_with_body (vNULL, NULL, NULL,
+ false, NULL, NULL,
+ "simdclone");
}
else
{
SET_DECL_RTL (new_decl, NULL);
DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
DECL_STATIC_DESTRUCTOR (new_decl) = 0;
- new_node
- = cgraph_copy_node_for_versioning (old_node, new_decl, vNULL, NULL);
- cgraph_call_function_insertion_hooks (new_node);
+ new_node = old_node->create_version_clone (new_decl, vNULL, NULL);
+ new_node->call_function_insertion_hooks ();
}
if (new_node == NULL)
return new_node;
entry_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
int freq = compute_call_stmt_bb_frequency (current_function_decl,
entry_bb);
- cgraph_create_edge (node, cgraph_get_create_node (fn),
- call, entry_bb->count, freq);
+ node->create_edge (cgraph_node::get_create (fn),
+ call, entry_bb->count, freq);
imm_use_iterator iter;
use_operand_p use_p;
if (!slot)
return false;
- cgraph_uid = func ? cgraph_get_node (func)->uid : 0;
+ cgraph_uid = func ? cgraph_node::get (func)->uid : 0;
if (func && DECL_ASSEMBLER_NAME_SET_P (func))
aname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (func));
keep the array visible to garbage collector to avoid reading collected
out nodes. */
static int nnodes;
-static GTY ((length ("nnodes"))) cgraph_node_ptr *order;
+static GTY ((length ("nnodes"))) cgraph_node **order;
/* If we are in IPA mode (i.e., current_function_decl is NULL), call
function CALLBACK for every function in the call graph. Otherwise,
else
{
gcc_assert (!order);
- order = ggc_vec_alloc<cgraph_node_ptr> (cgraph_n_nodes);
+ order = ggc_vec_alloc<cgraph_node *> (cgraph_n_nodes);
nnodes = ipa_reverse_postorder (order);
for (i = nnodes - 1; i >= 0; i--)
order[i]->process = 1;
/* Allow possibly removed nodes to be garbage collected. */
order[i] = NULL;
node->process = 0;
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
callback (DECL_STRUCT_FUNCTION (node->decl), data);
}
}
if ((flags & TODO_dump_symtab) && dump_file && !current_function_decl)
{
gcc_assert (!cfun);
- dump_symtab (dump_file);
+ symtab_node::dump_table (dump_file);
/* Flush the file. If verification fails, we won't be able to
close the file before aborting. */
fflush (dump_file);
struct cgraph_node *node;
if (!cfun)
return;
- node = cgraph_get_node (current_function_decl);
+ node = cgraph_node::get (current_function_decl);
if (node->ipa_transforms_to_apply.exists ())
{
bool applied = false;
FOR_EACH_DEFINED_FUNCTION (node)
if (node->analyzed
- && cgraph_function_with_gimple_body_p (node)
+ && node->has_gimple_body_p ()
&& (!node->clone_of || node->decl != node->clone_of->decl))
{
if (!node->global.inlined_to
&& node->ipa_transforms_to_apply.exists ())
{
- cgraph_get_body (node);
+ node->get_body ();
push_cfun (DECL_STRUCT_FUNCTION (node->decl));
execute_all_ipa_transforms ();
rebuild_cgraph_edges ();
{
struct cgraph_node *node = order[i];
- if (cgraph_function_with_gimple_body_p (node))
+ if (node->has_gimple_body_p ())
{
/* When streaming out references to statements as part of some IPA
pass summary, the statements need to have uids assigned and the
function_called_by_processed_nodes_p (void)
{
struct cgraph_edge *e;
- for (e = cgraph_get_node (current_function_decl)->callers;
+ for (e = cgraph_node::get (current_function_decl)->callers;
e;
e = e->next_caller)
{
if (e->caller->decl == current_function_decl)
continue;
- if (!cgraph_function_with_gimple_body_p (e->caller))
+ if (!e->caller->has_gimple_body_p ())
continue;
if (TREE_ASM_WRITTEN (e->caller->decl))
continue;
if (dump_file && e)
{
fprintf (dump_file, "Already processed call to:\n");
- dump_cgraph_node (dump_file, e->caller);
+ e->caller->dump (dump_file);
}
return e != NULL;
}
static inline bool
maybe_hot_frequency_p (struct function *fun, int freq)
{
- struct cgraph_node *node = cgraph_get_node (fun->decl);
+ struct cgraph_node *node = cgraph_node::get (fun->decl);
if (!profile_info || !flag_branch_probabilities)
{
if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
return true;
}
if ((!profile_info || !flag_branch_probabilities)
- && (cgraph_get_node (fun->decl)->frequency
+ && (cgraph_node::get (fun->decl)->frequency
== NODE_FREQUENCY_UNLIKELY_EXECUTED))
return true;
return false;
return probably_never_executed (fun, e->count, EDGE_FREQUENCY (e));
}
-/* Return true if NODE should be optimized for size. */
+/* Return true if function should be optimized for size. */
bool
-cgraph_optimize_for_size_p (struct cgraph_node *node)
+cgraph_node::optimize_for_size_p (void)
{
if (optimize_size)
return true;
- if (node && (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
+ if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
return true;
else
return false;
return true;
if (!fun || !fun->decl)
return false;
- return cgraph_optimize_for_size_p (cgraph_get_node (fun->decl));
+
+ cgraph_node *n = cgraph_node::get (fun->decl);
+ return n && n->optimize_for_size_p ();
}
/* Return true when current function should always be optimized for speed. */
compute_function_frequency (void)
{
basic_block bb;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
|| MAIN_NAME_P (DECL_NAME (current_function_decl)))
the corresponding call graph node. */
if (hist->type == HIST_TYPE_TIME_PROFILE)
{
- node = cgraph_get_node (hist->fun->decl);
-
- node->tp_first_run = hist->hvalue.counters[0];
+ node = cgraph_node::get (hist->fun->decl);
+ node->tp_first_run = hist->hvalue.counters[0];
if (dump_file)
fprintf (dump_file, "Read tp_first_run: %d\n", node->tp_first_run);
insert_to_assembler_name_hash (node, false);
}
-
-/* Add node into symbol table. This function is not used directly, but via
- cgraph/varpool node creation routines. */
-
-void
-symtab_register_node (symtab_node *node)
-{
- node->next = symtab_nodes;
- node->previous = NULL;
- if (symtab_nodes)
- symtab_nodes->previous = node;
- symtab_nodes = node;
-
- if (!node->decl->decl_with_vis.symtab_node)
- node->decl->decl_with_vis.symtab_node = node;
-
- node->ref_list.clear ();
-
- node->order = symtab_order++;
-
- /* Be sure to do this last; C++ FE might create new nodes via
- DECL_ASSEMBLER_NAME langhook! */
- insert_to_assembler_name_hash (node, false);
-}
-
-/* Remove NODE from same comdat group. */
-
-void
-symtab_remove_from_same_comdat_group (symtab_node *node)
-{
- if (node->same_comdat_group)
- {
- symtab_node *prev;
- for (prev = node->same_comdat_group;
- prev->same_comdat_group != node;
- prev = prev->same_comdat_group)
- ;
- if (node->same_comdat_group == prev)
- prev->same_comdat_group = NULL;
- else
- prev->same_comdat_group = node->same_comdat_group;
- node->same_comdat_group = NULL;
- node->set_comdat_group (NULL_TREE);
- }
-}
-
-/* Remove node from symbol table. This function is not used directly, but via
- cgraph/varpool node removal routines. */
-
-void
-symtab_unregister_node (symtab_node *node)
-{
- node->remove_all_references ();
- node->remove_all_referring ();
-
- /* Remove reference to section. */
- node->set_section_for_node (NULL);
-
- symtab_remove_from_same_comdat_group (node);
-
- if (node->previous)
- node->previous->next = node->next;
- else
- symtab_nodes = node->next;
- if (node->next)
- node->next->previous = node->previous;
- node->next = NULL;
- node->previous = NULL;
-
- /* During LTO symtab merging we temporarily corrupt decl to symtab node
- hash. */
- gcc_assert (node->decl->decl_with_vis.symtab_node || in_lto_p);
- if (node->decl->decl_with_vis.symtab_node == node)
- {
- symtab_node *replacement_node = NULL;
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- replacement_node = cgraph_find_replacement_node (cnode);
- node->decl->decl_with_vis.symtab_node = replacement_node;
- }
- if (!is_a <varpool_node *> (node) || !DECL_HARD_REGISTER (node->decl))
- unlink_from_assembler_name_hash (node, false);
- if (node->in_init_priority_hash)
- {
- struct symbol_priority_map in;
- void **slot;
- in.symbol = node;
-
- slot = htab_find_slot (init_priority_hash, &in, NO_INSERT);
- if (slot)
- htab_clear_slot (init_priority_hash, slot);
- }
-}
-
-
-/* Remove symtab NODE from the symbol table. */
-
-void
-symtab_remove_node (symtab_node *node)
-{
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- cgraph_remove_node (cnode);
- else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
- varpool_remove_node (vnode);
-}
-
/* Initalize asm name hash unless. */
void
if ((TREE_CODE (decl) == VAR_DECL
&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
|| TREE_CODE (decl) == FUNCTION_DECL)
- node = symtab_get_node (decl);
+ node = symtab_node::get (decl);
if (!DECL_ASSEMBLER_NAME_SET_P (decl))
{
SET_DECL_ASSEMBLER_NAME (decl, name);
}
}
+/* Return true when RESOLUTION indicates that the linker will use
+ the symbol from non-LTO object files. */
+
+bool
+resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
+{
+ return (resolution == LDPR_PREVAILING_DEF
+ || resolution == LDPR_PREEMPTED_REG
+ || resolution == LDPR_RESOLVED_EXEC
+ || resolution == LDPR_RESOLVED_DYN);
+}
+
+/* Hash sections by their names. */
+
+static hashval_t
+hash_section_hash_entry (const void *p)
+{
+ const section_hash_entry *n = (const section_hash_entry *) p;
+ return htab_hash_string (n->name);
+}
+
+/* Return true if the name of section P1 equals the string P2. */
+
+static int
+eq_sections (const void *p1, const void *p2)
+{
+ const section_hash_entry *n1 = (const section_hash_entry *) p1;
+ const char *name = (const char *)p2;
+ return n1->name == name || !strcmp (n1->name, name);
+}
+
+/* Add node into symbol table. This function is not used directly, but via
+ cgraph/varpool node creation routines. */
+
+void
+symtab_node::register_symbol (void)
+{
+ next = symtab_nodes;
+ previous = NULL;
+ if (symtab_nodes)
+ symtab_nodes->previous = this;
+ symtab_nodes = this;
+
+ if (!decl->decl_with_vis.symtab_node)
+ decl->decl_with_vis.symtab_node = this;
+
+ ref_list.clear ();
+
+ order = symtab_order++;
+
+ /* Be sure to do this last; C++ FE might create new nodes via
+ DECL_ASSEMBLER_NAME langhook! */
+ insert_to_assembler_name_hash (this, false);
+}
+
+/* Remove this symtab_node from its same comdat group list. */
+
+void
+symtab_node::remove_from_same_comdat_group (void)
+{
+ if (same_comdat_group)
+ {
+ symtab_node *prev;
+ for (prev = same_comdat_group;
+ prev->same_comdat_group != this;
+ prev = prev->same_comdat_group)
+ ;
+ if (same_comdat_group == prev)
+ prev->same_comdat_group = NULL;
+ else
+ prev->same_comdat_group = same_comdat_group;
+ same_comdat_group = NULL;
+ set_comdat_group (NULL);
+ }
+}
+
+/* Remove node from symbol table. This function is not used directly, but via
+ cgraph/varpool node removal routines. */
+
+void
+symtab_node::unregister (void)
+{
+ remove_all_references ();
+ remove_all_referring ();
+
+ /* Remove reference to section. */
+ set_section_for_node (NULL);
+
+ remove_from_same_comdat_group ();
+
+ if (previous)
+ previous->next = next;
+ else
+ symtab_nodes = next;
+ if (next)
+ next->previous = previous;
+ next = NULL;
+ previous = NULL;
+
+ /* During LTO symtab merging we temporarily corrupt decl to symtab node
+ hash. */
+ gcc_assert (decl->decl_with_vis.symtab_node || in_lto_p);
+ if (decl->decl_with_vis.symtab_node == this)
+ {
+ symtab_node *replacement_node = NULL;
+ if (cgraph_node *cnode = dyn_cast <cgraph_node *> (this))
+ replacement_node = cnode->find_replacement ();
+ decl->decl_with_vis.symtab_node = replacement_node;
+ }
+ if (!is_a <varpool_node *> (this) || !DECL_HARD_REGISTER (decl))
+ unlink_from_assembler_name_hash (this, false);
+ if (in_init_priority_hash)
+ {
+ struct symbol_priority_map in;
+ void **slot;
+ in.symbol = this;
+
+ slot = htab_find_slot (init_priority_hash, &in, NO_INSERT);
+ if (slot)
+ htab_clear_slot (init_priority_hash, slot);
+ }
+}
+
+
+/* Remove symbol from symbol table. */
+
+void
+symtab_node::remove (void)
+{
+ if (cgraph_node *cnode = dyn_cast <cgraph_node *> (this))
+ cnode->remove ();
+ else if (varpool_node *vnode = dyn_cast <varpool_node *> (this))
+ vnode->remove ();
+}
+
/* Add NEW_ to the same comdat group that OLD is in. */
void
-symtab_add_to_same_comdat_group (symtab_node *new_node,
- symtab_node *old_node)
+symtab_node::add_to_same_comdat_group (symtab_node *old_node)
{
gcc_assert (old_node->get_comdat_group ());
- gcc_assert (!new_node->same_comdat_group);
- gcc_assert (new_node != old_node);
+ gcc_assert (!same_comdat_group);
+ gcc_assert (this != old_node);
- new_node->set_comdat_group (old_node->get_comdat_group ());
- new_node->same_comdat_group = old_node;
+ set_comdat_group (old_node->get_comdat_group ());
+ same_comdat_group = old_node;
if (!old_node->same_comdat_group)
- old_node->same_comdat_group = new_node;
+ old_node->same_comdat_group = this;
else
{
symtab_node *n;
n->same_comdat_group != old_node;
n = n->same_comdat_group)
;
- n->same_comdat_group = new_node;
+ n->same_comdat_group = this;
}
}
/* Dissolve the same_comdat_group list in which NODE resides. */
void
-symtab_dissolve_same_comdat_group_list (symtab_node *node)
+symtab_node::dissolve_same_comdat_group_list (void)
{
- symtab_node *n = node;
+ symtab_node *n = this;
symtab_node *next;
- if (!node->same_comdat_group)
+ if (!same_comdat_group)
return;
do
{
n->set_comdat_group (NULL);
n = next;
}
- while (n != node);
+ while (n != this);
}
/* Return printable assembler name of NODE.
if (val && (TREE_CODE (val) == FUNCTION_DECL
|| TREE_CODE (val) == VAR_DECL))
{
- symtab_node *referred = symtab_get_node (val);
+ symtab_node *referred = symtab_node::get (val);
gcc_checking_assert (referred);
return add_reference (referred, use_type, stmt);
}
return ref;
}
-
static const char * const symtab_type_names[] = {"symbol", "function", "variable"};
-/* Dump base fields of symtab nodes. Not to be used directly. */
+/* Dump base fields of symtab nodes to F. Not to be used directly. */
void
-dump_symtab_base (FILE *f, symtab_node *node)
+symtab_node::dump_base (FILE *f)
{
static const char * const visibility_types[] = {
"default", "protected", "hidden", "internal"
};
- fprintf (f, "%s/%i (%s)",
- node->asm_name (),
- node->order,
- node->name ());
- dump_addr (f, " @", (void *)node);
- fprintf (f, "\n Type: %s", symtab_type_names[node->type]);
+ fprintf (f, "%s/%i (%s)", asm_name (), order, name ());
+ dump_addr (f, " @", (void *)this);
+ fprintf (f, "\n Type: %s", symtab_type_names[type]);
- if (node->definition)
+ if (definition)
fprintf (f, " definition");
- if (node->analyzed)
+ if (analyzed)
fprintf (f, " analyzed");
- if (node->alias)
+ if (alias)
fprintf (f, " alias");
- if (node->weakref)
+ if (weakref)
fprintf (f, " weakref");
- if (node->cpp_implicit_alias)
+ if (cpp_implicit_alias)
fprintf (f, " cpp_implicit_alias");
- if (node->alias_target)
+ if (alias_target)
fprintf (f, " target:%s",
- DECL_P (node->alias_target)
+ DECL_P (alias_target)
? IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME
- (node->alias_target))
- : IDENTIFIER_POINTER (node->alias_target));
- if (node->body_removed)
+ (alias_target))
+ : IDENTIFIER_POINTER (alias_target));
+ if (body_removed)
fprintf (f, "\n Body removed by symtab_remove_unreachable_nodes");
fprintf (f, "\n Visibility:");
- if (node->in_other_partition)
+ if (in_other_partition)
fprintf (f, " in_other_partition");
- if (node->used_from_other_partition)
+ if (used_from_other_partition)
fprintf (f, " used_from_other_partition");
- if (node->force_output)
+ if (force_output)
fprintf (f, " force_output");
- if (node->forced_by_abi)
+ if (forced_by_abi)
fprintf (f, " forced_by_abi");
- if (node->externally_visible)
+ if (externally_visible)
fprintf (f, " externally_visible");
- if (node->resolution != LDPR_UNKNOWN)
+ if (resolution != LDPR_UNKNOWN)
fprintf (f, " %s",
- ld_plugin_symbol_resolution_names[(int)node->resolution]);
- if (TREE_ASM_WRITTEN (node->decl))
+ ld_plugin_symbol_resolution_names[(int)resolution]);
+ if (TREE_ASM_WRITTEN (decl))
fprintf (f, " asm_written");
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
fprintf (f, " external");
- if (TREE_PUBLIC (node->decl))
+ if (TREE_PUBLIC (decl))
fprintf (f, " public");
- if (DECL_COMMON (node->decl))
+ if (DECL_COMMON (decl))
fprintf (f, " common");
- if (DECL_WEAK (node->decl))
+ if (DECL_WEAK (decl))
fprintf (f, " weak");
- if (DECL_DLLIMPORT_P (node->decl))
+ if (DECL_DLLIMPORT_P (decl))
fprintf (f, " dll_import");
- if (DECL_COMDAT (node->decl))
+ if (DECL_COMDAT (decl))
fprintf (f, " comdat");
- if (node->get_comdat_group ())
+ if (get_comdat_group ())
fprintf (f, " comdat_group:%s",
- IDENTIFIER_POINTER (node->get_comdat_group_id ()));
- if (DECL_ONE_ONLY (node->decl))
+ IDENTIFIER_POINTER (get_comdat_group_id ()));
+ if (DECL_ONE_ONLY (decl))
fprintf (f, " one_only");
- if (node->get_section ())
+ if (get_section ())
fprintf (f, " section:%s",
- node->get_section ());
- if (node->implicit_section)
+ get_section ());
+ if (implicit_section)
fprintf (f," (implicit_section)");
- if (DECL_VISIBILITY_SPECIFIED (node->decl))
+ if (DECL_VISIBILITY_SPECIFIED (decl))
fprintf (f, " visibility_specified");
- if (DECL_VISIBILITY (node->decl))
+ if (DECL_VISIBILITY (decl))
fprintf (f, " visibility:%s",
- visibility_types [DECL_VISIBILITY (node->decl)]);
- if (DECL_VIRTUAL_P (node->decl))
+ visibility_types [DECL_VISIBILITY (decl)]);
+ if (DECL_VIRTUAL_P (decl))
fprintf (f, " virtual");
- if (DECL_ARTIFICIAL (node->decl))
+ if (DECL_ARTIFICIAL (decl))
fprintf (f, " artificial");
- if (TREE_CODE (node->decl) == FUNCTION_DECL)
+ if (TREE_CODE (decl) == FUNCTION_DECL)
{
- if (DECL_STATIC_CONSTRUCTOR (node->decl))
+ if (DECL_STATIC_CONSTRUCTOR (decl))
fprintf (f, " constructor");
- if (DECL_STATIC_DESTRUCTOR (node->decl))
+ if (DECL_STATIC_DESTRUCTOR (decl))
fprintf (f, " destructor");
}
fprintf (f, "\n");
- if (node->same_comdat_group)
+ if (same_comdat_group)
fprintf (f, " Same comdat group as: %s/%i\n",
- node->same_comdat_group->asm_name (),
- node->same_comdat_group->order);
- if (node->next_sharing_asm_name)
+ same_comdat_group->asm_name (),
+ same_comdat_group->order);
+ if (next_sharing_asm_name)
fprintf (f, " next sharing asm name: %i\n",
- node->next_sharing_asm_name->order);
- if (node->previous_sharing_asm_name)
+ next_sharing_asm_name->order);
+ if (previous_sharing_asm_name)
fprintf (f, " previous sharing asm name: %i\n",
- node->previous_sharing_asm_name->order);
+ previous_sharing_asm_name->order);
- if (node->address_taken)
+ if (address_taken)
fprintf (f, " Address is taken.\n");
- if (node->aux)
+ if (aux)
{
fprintf (f, " Aux:");
- dump_addr (f, " @", (void *)node->aux);
+ dump_addr (f, " @", (void *)aux);
}
fprintf (f, " References: ");
- node->dump_references (f);
+ dump_references (f);
fprintf (f, " Referring: ");
- node->dump_referring (f);
- if (node->lto_file_data)
+ dump_referring (f);
+ if (lto_file_data)
fprintf (f, " Read from file: %s\n",
- node->lto_file_data->file_name);
+ lto_file_data->file_name);
}
-/* Dump symtab node. */
+/* Dump symtab node to F. */
void
-dump_symtab_node (FILE *f, symtab_node *node)
+symtab_node::dump (FILE *f)
{
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- dump_cgraph_node (f, cnode);
- else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
- dump_varpool_node (f, vnode);
+ if (cgraph_node *cnode = dyn_cast <cgraph_node *> (this))
+ cnode->dump (f);
+ else if (varpool_node *vnode = dyn_cast <varpool_node *> (this))
+ vnode->dump (f);
}
-/* Dump symbol table. */
+/* Dump symbol table to F. */
void
-dump_symtab (FILE *f)
+symtab_node::dump_table (FILE *f)
{
symtab_node *node;
fprintf (f, "Symbol table:\n\n");
FOR_EACH_SYMBOL (node)
- dump_symtab_node (f, node);
+ node->dump (f);
}
/* Dump symtab node NODE to stderr. */
DEBUG_FUNCTION void
-debug_symtab_node (symtab_node *node)
-{
- dump_symtab_node (stderr, node);
-}
-
-/* Dump symbol table to stderr. */
-
-DEBUG_FUNCTION void
-debug_symtab (void)
+symtab_node::debug (void)
{
- dump_symtab (stderr);
+ dump (stderr);
}
/* Verify common part of symtab nodes. */
DEBUG_FUNCTION bool
-verify_symtab_base (symtab_node *node)
+symtab_node::verify_base (void)
{
bool error_found = false;
symtab_node *hashed_node;
- if (is_a <cgraph_node *> (node))
+ if (is_a <cgraph_node *> (this))
{
- if (TREE_CODE (node->decl) != FUNCTION_DECL)
+ if (TREE_CODE (decl) != FUNCTION_DECL)
{
error ("function symbol is not function");
error_found = true;
}
}
- else if (is_a <varpool_node *> (node))
+ else if (is_a <varpool_node *> (this))
{
- if (TREE_CODE (node->decl) != VAR_DECL)
+ if (TREE_CODE (decl) != VAR_DECL)
{
error ("variable symbol is not variable");
error_found = true;
if (cgraph_state != CGRAPH_LTO_STREAMING)
{
- hashed_node = symtab_get_node (node->decl);
+ hashed_node = symtab_node::get (decl);
if (!hashed_node)
{
error ("node not found node->decl->decl_with_vis.symtab_node");
error_found = true;
}
- if (hashed_node != node
- && (!is_a <cgraph_node *> (node)
- || !dyn_cast <cgraph_node *> (node)->clone_of
- || dyn_cast <cgraph_node *> (node)->clone_of->decl
- != node->decl))
+ if (hashed_node != this
+ && (!is_a <cgraph_node *> (this)
+ || !dyn_cast <cgraph_node *> (this)->clone_of
+ || dyn_cast <cgraph_node *> (this)->clone_of->decl != decl))
{
error ("node differs from node->decl->decl_with_vis.symtab_node");
error_found = true;
}
if (assembler_name_hash)
{
- hashed_node = symtab_node_for_asm (DECL_ASSEMBLER_NAME (node->decl));
+ hashed_node = symtab_node_for_asm (DECL_ASSEMBLER_NAME (decl));
if (hashed_node && hashed_node->previous_sharing_asm_name)
{
error ("assembler name hash list corrupted");
}
while (hashed_node)
{
- if (hashed_node == node)
+ if (hashed_node == this)
break;
hashed_node = hashed_node->next_sharing_asm_name;
}
if (!hashed_node
- && !(is_a <varpool_node *> (node)
- || DECL_HARD_REGISTER (node->decl)))
+ && !(is_a <varpool_node *> (this)
+ || DECL_HARD_REGISTER (decl)))
{
error ("node not found in symtab assembler name hash");
error_found = true;
}
}
- if (node->previous_sharing_asm_name
- && node->previous_sharing_asm_name->next_sharing_asm_name != node)
+ if (previous_sharing_asm_name
+ && previous_sharing_asm_name->next_sharing_asm_name != this)
{
error ("double linked list of assembler names corrupted");
error_found = true;
}
- if (node->analyzed && !node->definition)
+ if (analyzed && !definition)
{
error ("node is analyzed byt it is not a definition");
error_found = true;
}
- if (node->cpp_implicit_alias && !node->alias)
+ if (cpp_implicit_alias && !alias)
{
error ("node is alias but not implicit alias");
error_found = true;
}
- if (node->alias && !node->definition
- && !node->weakref)
+ if (alias && !definition && !weakref)
{
error ("node is alias but not definition");
error_found = true;
}
- if (node->weakref && !node->alias)
+ if (weakref && !alias)
{
error ("node is weakref but not an alias");
error_found = true;
}
- if (node->same_comdat_group)
+ if (same_comdat_group)
{
- symtab_node *n = node->same_comdat_group;
+ symtab_node *n = same_comdat_group;
if (!n->get_comdat_group ())
{
error ("node is in same_comdat_group list but has no comdat_group");
error_found = true;
}
- if (n->get_comdat_group () != node->get_comdat_group ())
+ if (n->get_comdat_group () != get_comdat_group ())
{
error ("same_comdat_group list across different groups");
error_found = true;
error ("Node has same_comdat_group but it is not a definition");
error_found = true;
}
- if (n->type != node->type)
+ if (n->type != type)
{
error ("mixing different types of symbol in same comdat groups is not supported");
error_found = true;
}
- if (n == node)
+ if (n == this)
{
error ("node is alone in a comdat group");
error_found = true;
}
n = n->same_comdat_group;
}
- while (n != node);
- if (symtab_comdat_local_p (node))
+ while (n != this);
+ if (comdat_local_p ())
{
struct ipa_ref *ref = NULL;
- for (int i = 0; node->iterate_referring (i, ref); ++i)
+ for (int i = 0; iterate_referring (i, ref); ++i)
{
- if (!symtab_in_same_comdat_p (ref->referring, node))
+ if (!in_same_comdat_group_p (ref->referring))
{
error ("comdat-local symbol referred to by %s outside its "
"comdat",
}
}
}
- if (node->implicit_section && !node->get_section ())
+ if (implicit_section && !get_section ())
{
error ("implicit_section flag is set but section isn't");
error_found = true;
}
- if (node->get_section () && node->get_comdat_group ()
- && !node->implicit_section)
+ if (get_section () && get_comdat_group ()
+ && !implicit_section)
{
error ("Both section and comdat group is set");
error_found = true;
}
/* TODO: Add string table for sections, so we do not keep holding duplicated
strings. */
- if (node->alias && node->definition
- && node->get_section () != symtab_alias_target (node)->get_section ()
- && (!node->get_section()
- || !symtab_alias_target (node)->get_section ()
- || strcmp (node->get_section(),
- symtab_alias_target (node)->get_section ())))
+ if (alias && definition
+ && get_section () != get_alias_target ()->get_section ()
+ && (!get_section()
+ || !get_alias_target ()->get_section ()
+ || strcmp (get_section(),
+ get_alias_target ()->get_section ())))
{
error ("Alias and target's section differs");
- dump_symtab_node (stderr, symtab_alias_target (node));
+ get_alias_target ()->dump (stderr);
error_found = true;
}
- if (node->alias && node->definition
- && node->get_comdat_group () != symtab_alias_target (node)->get_comdat_group ())
+ if (alias && definition
+ && get_comdat_group () != get_alias_target ()->get_comdat_group ())
{
error ("Alias and target's comdat groups differs");
- dump_symtab_node (stderr, symtab_alias_target (node));
+ get_alias_target ()->dump (stderr);
error_found = true;
}
/* Verify consistency of NODE. */
DEBUG_FUNCTION void
-verify_symtab_node (symtab_node *node)
+symtab_node::verify (void)
{
if (seen_error ())
return;
timevar_push (TV_CGRAPH_VERIFY);
- if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
- verify_cgraph_node (cnode);
+ if (cgraph_node *node = dyn_cast <cgraph_node *> (this))
+ node->verify_node ();
else
- if (verify_symtab_base (node))
+ if (verify_base ())
{
- dump_symtab_node (stderr, node);
- internal_error ("verify_symtab_node failed");
+ debug ();
+ internal_error ("symtab_node::verify failed");
}
timevar_pop (TV_CGRAPH_VERIFY);
}
/* Verify symbol table for internal consistency. */
DEBUG_FUNCTION void
-verify_symtab (void)
+symtab_node::verify_symtab_nodes (void)
{
symtab_node *node;
hash_map<tree, symtab_node *> comdat_head_map (251);
FOR_EACH_SYMBOL (node)
{
- verify_symtab_node (node);
+ node->verify ();
if (node->get_comdat_group ())
{
symtab_node **entry, *s;
if (!s || s == *entry)
{
error ("Two symbols with same comdat_group are not linked by the same_comdat_group list.");
- dump_symtab_node (stderr, *entry);
- dump_symtab_node (stderr, node);
- internal_error ("verify_symtab failed");
+ (*entry)->debug ();
+ node->debug ();
+ internal_error ("symtab_node::verify failed");
}
}
}
}
-/* Return true when RESOLUTION indicate that linker will use
- the symbol from non-LTO object files. */
-
-bool
-resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
-{
- return (resolution == LDPR_PREVAILING_DEF
- || resolution == LDPR_PREEMPTED_REG
- || resolution == LDPR_RESOLVED_EXEC
- || resolution == LDPR_RESOLVED_DYN);
-}
-
-/* Return true when NODE is known to be used from other (non-LTO) object file.
- Known only when doing LTO via linker plugin. */
+/* Return true when NODE is known to be used from other (non-LTO)
+ object file. Known only when doing LTO via linker plugin. */
bool
-symtab_used_from_object_file_p (symtab_node *node)
+symtab_node::used_from_object_file_p_worker (symtab_node *node)
{
if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
return false;
return false;
}
+
+/* Return true when symtab_node is known to be used from other (non-LTO)
+ object file. Known only when doing LTO via linker plugin. */
+
+bool
+symtab_node::used_from_object_file_p (void)
+{
+ return symtab_node::used_from_object_file_p_worker (this);
+}
+
/* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
but other code such as notice_global_symbol generates rtl. */
void
-symtab_make_decl_local (tree decl)
+symtab_node::make_decl_local (void)
{
rtx rtl, symbol;
SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
}
-/* Return availability of NODE. */
-
-enum availability
-symtab_node_availability (symtab_node *node)
-{
- if (is_a <cgraph_node *> (node))
- return cgraph_function_body_availability (cgraph (node));
- else
- return cgraph_variable_initializer_availability (varpool (node));
-}
-
-/* Given NODE, walk the alias chain to return the symbol NODE is alias of.
+/* Walk the alias chain to return the symbol NODE is alias of.
If NODE is not an alias, return NODE.
When AVAILABILITY is non-NULL, get minimal availability in the chain. */
symtab_node *
-symtab_alias_ultimate_target (symtab_node *node, enum availability *availability)
+symtab_node::ultimate_alias_target (enum availability *availability)
{
bool weakref_p = false;
- if (!node->alias)
+ if (!alias)
{
if (availability)
- *availability = symtab_node_availability (node);
- return node;
+ *availability = get_availability ();
+ return this;
}
/* To determine visibility of the target, we follow ELF semantic of aliases.
if (availability)
{
- weakref_p = node->weakref;
+ weakref_p = weakref;
if (!weakref_p)
- *availability = symtab_node_availability (node);
+ *availability = get_availability ();
else
*availability = AVAIL_LOCAL;
}
+
+ symtab_node *node = this;
while (node)
{
if (node->alias && node->analyzed)
- node = symtab_alias_target (node);
+ node = node->get_alias_target ();
else
{
if (!availability)
{
if (weakref_p)
{
- enum availability a = symtab_node_availability (node);
+ enum availability a = node->get_availability ();
if (a < *availability)
*availability = a;
}
}
if (node && availability && weakref_p)
{
- enum availability a = symtab_node_availability (node);
+ enum availability a = node->get_availability ();
if (a < *availability)
*availability = a;
weakref_p = node->weakref;
copy the visibility from the target to get things right. */
void
-fixup_same_cpp_alias_visibility (symtab_node *node, symtab_node *target)
+symtab_node::fixup_same_cpp_alias_visibility (symtab_node *target)
{
- if (is_a <cgraph_node *> (node))
+ if (is_a <cgraph_node *> (this))
{
- DECL_DECLARED_INLINE_P (node->decl)
+ DECL_DECLARED_INLINE_P (decl)
= DECL_DECLARED_INLINE_P (target->decl);
- DECL_DISREGARD_INLINE_LIMITS (node->decl)
+ DECL_DISREGARD_INLINE_LIMITS (decl)
= DECL_DISREGARD_INLINE_LIMITS (target->decl);
}
/* FIXME: It is not really clear why those flags should not be copied for
functions, too. */
else
{
- DECL_WEAK (node->decl) = DECL_WEAK (target->decl);
- DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (target->decl);
- DECL_VISIBILITY (node->decl) = DECL_VISIBILITY (target->decl);
+ DECL_WEAK (decl) = DECL_WEAK (target->decl);
+ DECL_EXTERNAL (decl) = DECL_EXTERNAL (target->decl);
+ DECL_VISIBILITY (decl) = DECL_VISIBILITY (target->decl);
}
- DECL_VIRTUAL_P (node->decl) = DECL_VIRTUAL_P (target->decl);
- if (TREE_PUBLIC (node->decl))
+ DECL_VIRTUAL_P (decl) = DECL_VIRTUAL_P (target->decl);
+ if (TREE_PUBLIC (decl))
{
tree group;
- DECL_EXTERNAL (node->decl) = DECL_EXTERNAL (target->decl);
- DECL_COMDAT (node->decl) = DECL_COMDAT (target->decl);
+ DECL_EXTERNAL (decl) = DECL_EXTERNAL (target->decl);
+ DECL_COMDAT (decl) = DECL_COMDAT (target->decl);
group = target->get_comdat_group ();
- node->set_comdat_group (group);
- if (group
- && !node->same_comdat_group)
- symtab_add_to_same_comdat_group (node, target);
+ set_comdat_group (group);
+ if (group && !same_comdat_group)
+ add_to_same_comdat_group (target);
}
- node->externally_visible = target->externally_visible;
-}
-
-/* Hash sections by their names. */
-
-static hashval_t
-hash_section_hash_entry (const void *p)
-{
- const section_hash_entry *n = (const section_hash_entry *) p;
- return htab_hash_string (n->name);
-}
-
-/* Return true if section P1 name equals to P2. */
-
-static int
-eq_sections (const void *p1, const void *p2)
-{
- const section_hash_entry *n1 = (const section_hash_entry *) p1;
- const char *name = (const char *)p2;
- return n1->name == name || !strcmp (n1->name, name);
+ externally_visible = target->externally_visible;
}
/* Set section, do not recurse into aliases.
When one wants to change section of symbol and its aliases,
- use set_section */
+ use set_section. */
void
symtab_node::set_section_for_node (const char *section)
/* Worker for set_section. */
-static bool
-set_section_1 (struct symtab_node *n, void *s)
+bool
+symtab_node::set_section (symtab_node *n, void *s)
{
n->set_section_for_node ((char *)s);
return false;
symtab_node::set_section (const char *section)
{
gcc_assert (!this->alias);
- symtab_for_node_and_aliases (this, set_section_1, const_cast<char *>(section), true);
+ call_for_symbol_and_aliases
+ (symtab_node::set_section, const_cast<char *>(section), true);
}
/* Return the initialization priority. */
return h ? h->init : DEFAULT_INIT_PRIORITY;
}
+/* Return availability of the symbol. */
+enum availability symtab_node::get_availability (void)
+{
+ if (is_a <cgraph_node *> (this))
+ return dyn_cast <cgraph_node *> (this)->get_availability ();
+ else
+ return cgraph_variable_initializer_availability
+ (dyn_cast <varpool_node *> (this));
+}
+
+
/* Return the finalization priority. */
priority_type
DECL. If there is no previous priority information, a freshly
allocated structure is returned. */
-static struct symbol_priority_map *
-symbol_priority_info (struct symtab_node *symbol)
+struct symbol_priority_map *
+symtab_node::priority_info (void)
{
struct symbol_priority_map in;
struct symbol_priority_map *h;
void **loc;
- in.symbol = symbol;
+ in.symbol = this;
if (!init_priority_hash)
init_priority_hash = htab_create_ggc (512, symbol_priority_map_hash,
symbol_priority_map_eq, 0);
{
h = ggc_cleared_alloc<symbol_priority_map> ();
*loc = h;
- h->symbol = symbol;
+ h->symbol = this;
h->init = DEFAULT_INIT_PRIORITY;
h->fini = DEFAULT_INIT_PRIORITY;
- symbol->in_init_priority_hash = true;
+ in_init_priority_hash = true;
}
return h;
gcc_assert (get_init_priority() == priority);
return;
}
- h = symbol_priority_info (this);
+ h = priority_info ();
h->init = priority;
}
gcc_assert (get_fini_priority() == priority);
return;
}
- h = symbol_priority_info (this);
+ h = priority_info ();
h->fini = priority;
}
/* Worker for symtab_resolve_alias. */
-static bool
-set_implicit_section (struct symtab_node *n, void *data ATTRIBUTE_UNUSED)
+bool
+symtab_node::set_implicit_section (symtab_node *n,
+ void *data ATTRIBUTE_UNUSED)
{
n->implicit_section = true;
return false;
}
-/* Add reference recording that NODE is alias of TARGET.
+/* Add reference recording that the symtab node is an alias of TARGET.
The function can fail in the case of aliasing cycles; in this case
it returns false. */
bool
-symtab_resolve_alias (symtab_node *node, symtab_node *target)
+symtab_node::resolve_alias (symtab_node *target)
{
symtab_node *n;
- gcc_assert (!node->analyzed
- && !vec_safe_length (node->ref_list.references));
+ gcc_assert (!analyzed && !vec_safe_length (ref_list.references));
/* Never let cycles to creep into the symbol table alias references;
those will make alias walkers to be infinite. */
for (n = target; n && n->alias;
- n = n->analyzed ? symtab_alias_target (n) : NULL)
- if (n == node)
+ n = n->analyzed ? n->get_alias_target () : NULL)
+ if (n == this)
{
- if (is_a <cgraph_node *> (node))
- error ("function %q+D part of alias cycle", node->decl);
- else if (is_a <varpool_node *> (node))
- error ("variable %q+D part of alias cycle", node->decl);
+ if (is_a <cgraph_node *> (this))
+ error ("function %q+D part of alias cycle", decl);
+ else if (is_a <varpool_node *> (this))
+ error ("variable %q+D part of alias cycle", decl);
else
gcc_unreachable ();
- node->alias = false;
+ alias = false;
return false;
}
/* "analyze" the node - i.e. mark the reference. */
- node->definition = true;
- node->alias = true;
- node->analyzed = true;
- node->add_reference (target, IPA_REF_ALIAS, NULL);
+ definition = true;
+ alias = true;
+ analyzed = true;
+ add_reference (target, IPA_REF_ALIAS, NULL);
/* Add alias into the comdat group of its target unless it is already there. */
- if (node->same_comdat_group)
- symtab_remove_from_same_comdat_group (node);
- node->set_comdat_group (NULL);
+ if (same_comdat_group)
+ remove_from_same_comdat_group ();
+ set_comdat_group (NULL);
if (target->get_comdat_group ())
- symtab_add_to_same_comdat_group (node, target);
+ add_to_same_comdat_group (target);
- if ((node->get_section () != target->get_section ()
- || target->get_comdat_group ())
- && node->get_section () && !node->implicit_section)
+ if ((get_section () != target->get_section ()
+ || target->get_comdat_group ()) && get_section () && !implicit_section)
{
- error ("section of alias %q+D must match section of its target",
- node->decl);
+ error ("section of alias %q+D must match section of its target", decl);
}
- symtab_for_node_and_aliases (node, set_section_1,
- const_cast<char *>(target->get_section ()), true);
+ call_for_symbol_and_aliases (symtab_node::set_section,
+ const_cast<char *>(target->get_section ()), true);
if (target->implicit_section)
- symtab_for_node_and_aliases (node,
- set_implicit_section, NULL, true);
+ call_for_symbol_and_aliases (set_implicit_section, NULL, true);
/* Alias targets become redundant after alias is resolved into an reference.
We do not want to keep it around or we would have to mind updating them
when renaming symbols. */
- node->alias_target = NULL;
+ alias_target = NULL;
- if (node->cpp_implicit_alias && cgraph_state >= CGRAPH_STATE_CONSTRUCTION)
- fixup_same_cpp_alias_visibility (node, target);
+ if (cpp_implicit_alias && cgraph_state >= CGRAPH_STATE_CONSTRUCTION)
+ fixup_same_cpp_alias_visibility (target);
/* If alias has address taken, so does the target. */
- if (node->address_taken)
- symtab_alias_ultimate_target (target, NULL)->address_taken = true;
+ if (address_taken)
+ target->ultimate_alias_target ()->address_taken = true;
return true;
}
-/* Call calback on NODE and aliases associated to NODE.
+/* Call callback on the symtab node and aliases associated to this node.
When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
skipped. */
bool
-symtab_for_node_and_aliases (symtab_node *node,
- bool (*callback) (symtab_node *, void *),
- void *data,
- bool include_overwritable)
+symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
+ void *),
+ void *data, bool include_overwritable)
{
int i;
struct ipa_ref *ref;
- if (callback (node, data))
+ if (callback (this, data))
return true;
- for (i = 0; node->iterate_referring (i, ref); i++)
+ for (i = 0; iterate_referring (i, ref); i++)
if (ref->use == IPA_REF_ALIAS)
{
symtab_node *alias = ref->referring;
if (include_overwritable
- || symtab_node_availability (alias) > AVAIL_OVERWRITABLE)
- if (symtab_for_node_and_aliases (alias, callback, data,
- include_overwritable))
+ || alias->get_availability () > AVAIL_INTERPOSABLE)
+ if (alias->call_for_symbol_and_aliases (callback, data,
+ include_overwritable))
return true;
}
return false;
}
-/* Worker searching nonoverwritable alias. */
+/* Worker searching for a noninterposable alias. */
-static bool
-symtab_nonoverwritable_alias_1 (symtab_node *node, void *data)
+bool
+symtab_node::noninterposable_alias (symtab_node *node, void *data)
{
if (decl_binds_to_current_def_p (node->decl))
{
- symtab_node *fn = symtab_alias_ultimate_target (node);
+ symtab_node *fn = node->ultimate_alias_target ();
/* Ensure that the alias is well formed this may not be the case
of user defined aliases and currently it is not always the case
return false;
}
-/* If NODE can not be overwriten by static or dynamic linker to point to different
- definition, return NODE. Otherwise look for alias with such property and if
- none exists, introduce new one. */
+/* If the node cannot be overwritten by static or dynamic linker to point to
+ different definition, return NODE. Otherwise look for alias with such
+ property and if none exists, introduce new one. */
symtab_node *
-symtab_nonoverwritable_alias (symtab_node *node)
+symtab_node::noninterposable_alias (void)
{
tree new_decl;
symtab_node *new_node = NULL;
/* First try to look up existing alias or base object
(if that is already non-overwritable). */
- node = symtab_alias_ultimate_target (node, NULL);
+ symtab_node *node = ultimate_alias_target ();
gcc_assert (!node->alias && !node->weakref);
- symtab_for_node_and_aliases (node, symtab_nonoverwritable_alias_1,
- (void *)&new_node, true);
+ node->call_for_symbol_and_aliases (symtab_node::noninterposable_alias,
+ (void *)&new_node, true);
if (new_node)
return new_node;
#ifndef ASM_OUTPUT_DEF
{
DECL_STATIC_CONSTRUCTOR (new_decl) = 0;
DECL_STATIC_DESTRUCTOR (new_decl) = 0;
- new_node = cgraph_create_function_alias
- (new_decl, node->decl);
+ new_node = cgraph_node::create_alias (new_decl, node->decl);
}
else
{
DECL_INITIAL (new_decl) = error_mark_node;
new_node = varpool_create_variable_alias (new_decl, node->decl);
}
- symtab_resolve_alias (new_node, node);
+ new_node->resolve_alias (node);
gcc_assert (decl_binds_to_current_def_p (new_decl)
&& targetm.binds_local_p (new_decl));
return new_node;
}
-/* Return true if A and B represents semantically equivalent symbols. */
+/* Return true if the symtab node and TARGET represent
+ semantically equivalent symbols. */
bool
-symtab_semantically_equivalent_p (symtab_node *a,
- symtab_node *b)
+symtab_node::semantically_equivalent_p (symtab_node *target)
{
enum availability avail;
symtab_node *ba;
symtab_node *bb;
/* Equivalent functions are equivalent. */
- if (a->decl == b->decl)
+ if (decl == target->decl)
return true;
/* If symbol is not overwritable by different implementation,
walk to the base object it defines. */
- ba = symtab_alias_ultimate_target (a, &avail);
+ ba = ultimate_alias_target (&avail);
if (avail >= AVAIL_AVAILABLE)
{
- if (ba == b)
+ if (target == ba)
return true;
}
else
- ba = a;
- bb = symtab_alias_ultimate_target (b, &avail);
+ ba = this;
+ bb = target->ultimate_alias_target (&avail);
if (avail >= AVAIL_AVAILABLE)
{
- if (a == bb)
+ if (this == bb)
return true;
}
else
- bb = b;
+ bb = target;
return bb == ba;
}
-/* Classify symbol NODE for partitioning. */
+/* Classify the symtab node for partitioning. */
enum symbol_partitioning_class
-symtab_get_symbol_partitioning_class (symtab_node *node)
+symtab_node::get_partitioning_class (void)
{
/* Inline clones are always duplicated.
This include external delcarations. */
- cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
+ cgraph_node *cnode = dyn_cast <cgraph_node *> (this);
- if (DECL_ABSTRACT (node->decl))
+ if (DECL_ABSTRACT (decl))
return SYMBOL_EXTERNAL;
if (cnode && cnode->global.inlined_to)
return SYMBOL_DUPLICATE;
/* Weakref aliases are always duplicated. */
- if (node->weakref)
+ if (weakref)
return SYMBOL_DUPLICATE;
/* External declarations are external. */
- if (DECL_EXTERNAL (node->decl))
+ if (DECL_EXTERNAL (decl))
return SYMBOL_EXTERNAL;
- if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
+ if (varpool_node *vnode = dyn_cast <varpool_node *> (this))
{
/* Constant pool references use local symbol names that can not
be promoted global. We should never put into a constant pool
objects that can not be duplicated across partitions. */
- if (DECL_IN_CONSTANT_POOL (node->decl))
+ if (DECL_IN_CONSTANT_POOL (decl))
return SYMBOL_DUPLICATE;
gcc_checking_assert (vnode->definition);
}
Handle them as external; compute_ltrans_boundary take care to make
proper things to happen (i.e. to make them appear in the boundary but
with body streamed, so clone can me materialized). */
- else if (!cgraph (node)->definition)
+ else if (!dyn_cast <cgraph_node *> (this)->definition)
return SYMBOL_EXTERNAL;
/* Linker discardable symbols are duplicated to every use unless they are
keyed. */
- if (DECL_ONE_ONLY (node->decl)
- && !node->force_output
- && !node->forced_by_abi
- && !symtab_used_from_object_file_p (node))
+ if (DECL_ONE_ONLY (decl)
+ && !force_output
+ && !forced_by_abi
+ && !used_from_object_file_p ())
return SYMBOL_DUPLICATE;
return SYMBOL_PARTITION;
{
if (this->analyzed)
{
- symtab_node *target = symtab_alias_ultimate_target (this);
+ symtab_node *target = ultimate_alias_target ();
if (target->alias && target->weakref)
return false;
return false;
}
- node = cgraph_get_node (fn_decl);
+ node = cgraph_node::get (fn_decl);
/* All calls should have cgraph here. */
if (!node)
{
{
gimple_call_set_fndecl (stmt, repl);
update_stmt (stmt);
- node = cgraph_create_node (repl);
+ node = cgraph_node::create (repl);
node->local.tm_may_enter_irr = false;
return expand_call_tm (region, gsi);
}
bool want_irr_scan_normal;
};
-typedef vec<cgraph_node_ptr> cgraph_node_queue;
+typedef vec<cgraph_node *> cgraph_node_queue;
/* Return the ipa data associated with NODE, allocating zeroed memory
if necessary. TRAVERSE_ALIASES is true if we must traverse aliases
struct tm_ipa_cg_data *d;
if (traverse_aliases && (*node)->alias)
- *node = cgraph_alias_target (*node);
+ *node = (*node)->get_alias_target ();
d = (struct tm_ipa_cg_data *) (*node)->aux;
if (find_tm_replacement_function (fndecl))
continue;
- node = cgraph_get_node (fndecl);
+ node = cgraph_node::get (fndecl);
gcc_assert (node != NULL);
d = get_cg_data (&node, true);
if (find_tm_replacement_function (fn))
break;
- node = cgraph_get_node (fn);
+ node = cgraph_node::get (fn);
d = get_cg_data (&node, true);
/* Return true if irrevocable, but above all, believe
if (find_tm_replacement_function (fndecl))
continue;
- tnode = cgraph_get_node (fndecl);
+ tnode = cgraph_node::get (fndecl);
d = get_cg_data (&tnode, true);
pcallers = (for_clone ? &d->tm_callers_clone
/* If we aren't seeing the final version of the function we don't
know what it will contain at runtime. */
- if (cgraph_function_body_availability (node) < AVAIL_AVAILABLE)
+ if (node->get_availability () < AVAIL_AVAILABLE)
return true;
/* If the function must go irrevocable, then of course true. */
result in one of the bits above being set so that we will not
have to recurse next time. */
if (node->alias)
- return ipa_tm_mayenterirr_function (cgraph_get_node (node->thunk.alias));
+ return ipa_tm_mayenterirr_function (cgraph_node::get (node->thunk.alias));
/* What remains is unmarked local functions without items that force
the function to go irrevocable. */
static inline void
ipa_tm_mark_force_output_node (struct cgraph_node *node)
{
- cgraph_mark_force_output_node (node);
+ node->mark_force_output ();
node->analyzed = true;
}
if (DECL_ONE_ONLY (new_decl))
varpool_get_node (new_decl)->set_comdat_group (tm_mangle (decl_comdat_group_id (old_decl)));
- new_node = cgraph_same_body_alias (NULL, new_decl, info->new_decl);
+ new_node = cgraph_node::create_same_body_alias (new_decl, info->new_decl);
new_node->tm_clone = true;
new_node->externally_visible = info->old_node->externally_visible;
/* ?? Do not traverse aliases here. */
varpool_get_node (new_decl)->set_comdat_group (tm_mangle (DECL_COMDAT_GROUP (old_decl)));
gcc_assert (!old_node->ipa_transforms_to_apply.exists ());
- new_node = cgraph_copy_node_for_versioning (old_node, new_decl, vNULL, NULL);
+ new_node = old_node->create_version_clone (new_decl, vNULL, NULL);
new_node->local.local = false;
new_node->externally_visible = old_node->externally_visible;
new_node->lowered = true;
new_node->tm_clone = 1;
get_cg_data (&old_node, true)->clone = new_node;
- if (cgraph_function_body_availability (old_node) >= AVAIL_OVERWRITABLE)
+ if (old_node->get_availability () >= AVAIL_INTERPOSABLE)
{
/* Remap extern inline to static inline. */
/* ??? Is it worth trying to use make_decl_one_only? */
record_tm_clone_pair (old_decl, new_decl);
- cgraph_call_function_insertion_hooks (new_node);
+ new_node->call_function_insertion_hooks ();
if (old_node->force_output
|| old_node->ref_list.first_referring ())
ipa_tm_mark_force_output_node (new_node);
struct create_version_alias_info data;
data.old_node = old_node;
data.new_decl = new_decl;
- cgraph_for_node_and_aliases (old_node, ipa_tm_create_version_alias,
- &data, true);
+ old_node->call_for_symbol_thunks_and_aliases (ipa_tm_create_version_alias,
+ &data, true);
}
}
gsi = gsi_after_labels (bb);
gsi_insert_before (&gsi, g, GSI_SAME_STMT);
- cgraph_create_edge (node,
- cgraph_get_create_node
- (builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
- g, 0,
- compute_call_stmt_bb_frequency (node->decl,
- gimple_bb (g)));
+ node->create_edge (cgraph_node::get_create
+ (builtin_decl_explicit (BUILT_IN_TM_IRREVOCABLE)),
+ g, 0,
+ compute_call_stmt_bb_frequency (node->decl,
+ gimple_bb (g)));
}
/* Construct a call to TM_GETTMCLONE and insert it before GSI. */
technically taking the address of the original function and
its clone. Explain this so inlining will know this function
is needed. */
- cgraph_mark_address_taken_node (cgraph_get_node (fndecl));
+ cgraph_node::get (fndecl)->mark_address_taken () ;
if (clone)
- cgraph_mark_address_taken_node (cgraph_get_node (clone));
+ cgraph_node::get (clone)->mark_address_taken ();
}
safe = is_tm_safe (TREE_TYPE (old_fn));
gsi_insert_before (gsi, g, GSI_SAME_STMT);
- cgraph_create_edge (node, cgraph_get_create_node (gettm_fn), g, 0,
- compute_call_stmt_bb_frequency (node->decl,
- gimple_bb (g)));
+ node->create_edge (cgraph_node::get_create (gettm_fn), g, 0,
+ compute_call_stmt_bb_frequency (node->decl,
+ gimple_bb (g)));
/* Cast return value from tm_gettmclone* into appropriate function
pointer. */
{
gimple stmt = gsi_stmt (*gsi);
struct cgraph_node *new_node;
- struct cgraph_edge *e = cgraph_edge (node, stmt);
+ struct cgraph_edge *e = node->get_edge (stmt);
tree fndecl = gimple_call_fndecl (stmt);
/* For indirect calls, pass the address through the runtime. */
fndecl = find_tm_replacement_function (fndecl);
if (fndecl)
{
- new_node = cgraph_get_create_node (fndecl);
+ new_node = cgraph_node::get_create (fndecl);
/* ??? Mark all transaction_wrap functions tm_may_enter_irr.
unsigned int i;
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
bitmap_obstack_initialize (&tm_obstack);
/* For all local functions marked tm_callable, queue them. */
FOR_EACH_DEFINED_FUNCTION (node)
if (is_tm_callable (node->decl)
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
maybe_push_queue (node, &tm_callees, &d->in_callee_queue);
/* For all local reachable functions... */
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
/* ... marked tm_pure, record that fact for the runtime by
indicating that the pure function is its own tm_callable.
for (i = 0; i < tm_callees.length (); ++i)
{
node = tm_callees[i];
- a = cgraph_function_body_availability (node);
+ a = node->get_availability ();
d = get_cg_data (&node, true);
/* Put it in the worklist so we can scan the function later
else if (a <= AVAIL_NOT_AVAILABLE
&& !is_tm_safe_or_pure (node->decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
- else if (a >= AVAIL_OVERWRITABLE)
+ else if (a >= AVAIL_INTERPOSABLE)
{
if (!tree_versionable_function_p (node->decl))
ipa_tm_note_irrevocable (node, &irr_worklist);
we need not scan the callees now, as the base will do. */
if (node->alias)
{
- node = cgraph_get_node (node->thunk.alias);
+ node = cgraph_node::get (node->thunk.alias);
d = get_cg_data (&node, true);
maybe_push_queue (node, &tm_callees, &d->in_callee_queue);
continue;
/* Propagate back to referring aliases as well. */
FOR_EACH_ALIAS (node, ref)
{
- caller = cgraph (ref->referring);
+ caller = dyn_cast<cgraph_node *> (ref->referring);
if (!caller->local.tm_may_enter_irr)
{
/* ?? Do not traverse aliases here. */
other functions. */
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
if (is_tm_safe (node->decl))
if (node->cpp_implicit_alias)
continue;
- a = cgraph_function_body_availability (node);
+ a = node->get_availability ();
d = get_cg_data (&node, true);
if (a <= AVAIL_NOT_AVAILABLE)
}
FOR_EACH_DEFINED_FUNCTION (node)
if (node->lowered
- && cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ && node->get_availability () >= AVAIL_INTERPOSABLE)
{
d = get_cg_data (&node, true);
if (d->all_tm_regions)
node->aux = NULL;
#ifdef ENABLE_CHECKING
- verify_cgraph ();
+ cgraph_node::verify_cgraph_nodes ();
#endif
return 0;
edge_iterator ei;
count_scale
- = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
+ = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
- cgraph_get_node (current_function_decl)->count;
+ cgraph_node::get (current_function_decl)->count;
EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
count_scale);
struct cgraph_node *node;
if (!DECL_EXTERNAL (expr))
return false;
- node = cgraph_function_node (cgraph_get_node (expr), NULL);
+ node = cgraph_node::get (expr)->function_symbol ();
if (node && node->in_other_partition)
return false;
return true;
the index of a TLS variable equals the index of its control variable in
the other vector. */
static varpool_node_set tls_vars;
-static vec<varpool_node_ptr> control_vars;
+static vec<varpool_node *> control_vars;
/* For the current basic block, an SSA_NAME that has computed the address
of the TLS variable at the corresponding index. */
gimple_seq_add_stmt (&d->seq, x);
- cgraph_create_edge (d->cfun_node, d->builtin_node, x,
- d->bb->count, d->bb_freq);
+ d->cfun_node->create_edge (d->builtin_node, x, d->bb->count, d->bb_freq);
/* We may be adding a new reference to a new variable to the function.
This means we have to play with the ipa-reference web. */
d.builtin_decl = builtin_decl_explicit (BUILT_IN_EMUTLS_GET_ADDRESS);
/* This is where we introduce the declaration to the IL and so we have to
create a node for it. */
- d.builtin_node = cgraph_get_create_node (d.builtin_decl);
+ d.builtin_node = cgraph_node::get_create (d.builtin_decl);
FOR_EACH_BB_FN (d.bb, cfun)
{
switch (id->transform_call_graph_edges)
{
case CB_CGE_DUPLICATE:
- edge = cgraph_edge (id->src_node, orig_stmt);
+ edge = id->src_node->get_edge (orig_stmt);
if (edge)
{
int edge_freq = edge->frequency;
break;
case CB_CGE_MOVE_CLONES:
- cgraph_set_call_stmt_including_clones (id->dst_node,
- orig_stmt, stmt);
- edge = cgraph_edge (id->dst_node, stmt);
+ id->dst_node->set_call_stmt_including_clones (orig_stmt,
+ stmt);
+ edge = id->dst_node->get_edge (stmt);
break;
case CB_CGE_MOVE:
- edge = cgraph_edge (id->dst_node, orig_stmt);
+ edge = id->dst_node->get_edge (orig_stmt);
if (edge)
cgraph_set_call_stmt (edge, stmt);
break;
&& id->dst_node->definition
&& (fn = gimple_call_fndecl (stmt)) != NULL)
{
- struct cgraph_node *dest = cgraph_get_node (fn);
+ struct cgraph_node *dest = cgraph_node::get (fn);
/* We have missing edge in the callgraph. This can happen
when previous inlining turned an indirect call into a
|| !id->src_node->definition
|| !id->dst_node->definition);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
- cgraph_create_edge_including_clones
- (id->dst_node, dest, orig_stmt, stmt, bb->count,
+ id->dst_node->create_edge_including_clones
+ (dest, orig_stmt, stmt, bb->count,
compute_call_stmt_bb_frequency (id->dst_node->decl,
copy_basic_block),
CIF_ORIGINALLY_INDIRECT_CALL);
else
- cgraph_create_edge (id->dst_node, dest, stmt,
+ id->dst_node->create_edge (dest, stmt,
bb->count,
compute_call_stmt_bb_frequency
(id->dst_node->decl,
{
if (is_gimple_call (gsi_stmt (si)))
{
- struct cgraph_edge *edge = cgraph_edge (id->dst_node, gsi_stmt (si));
+ struct cgraph_edge *edge = id->dst_node->get_edge (gsi_stmt (si));
if (edge)
cgraph_redirect_edge_call_stmt_to_callee (edge);
}
/* Do not special case builtins where we see the body.
This just confuse inliner. */
struct cgraph_node *node;
- if (!(node = cgraph_get_node (decl))
+ if (!(node = cgraph_node::get (decl))
|| node->definition)
;
/* For buitins that are likely expanded to nothing or
if (gimple_code (stmt) != GIMPLE_CALL)
goto egress;
- cg_edge = cgraph_edge (id->dst_node, stmt);
+ cg_edge = id->dst_node->get_edge (stmt);
gcc_checking_assert (cg_edge);
/* First, see if we can figure out what function is being called.
If we cannot, then there is no hope of inlining the function. */
goto egress;
}
fn = cg_edge->callee->decl;
- cgraph_get_body (cg_edge->callee);
+ cg_edge->callee->get_body ();
#ifdef ENABLE_CHECKING
if (cg_edge->callee->decl != id->dst_node->decl)
- verify_cgraph_node (cg_edge->callee);
+ cg_edge->callee->verify ();
#endif
/* We will be inlining this callee. */
(*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
/* Update callgraph if needed. */
- cgraph_remove_node (cg_edge->callee);
+ cg_edge->callee->remove ();
id->block = NULL_TREE;
successfully_inlined = TRUE;
/* Clear out ID. */
memset (&id, 0, sizeof (id));
- id.src_node = id.dst_node = cgraph_get_node (fn);
+ id.src_node = id.dst_node = cgraph_node::get (fn);
gcc_assert (id.dst_node->definition);
id.dst_fn = fn;
/* Or any functions that aren't finished yet. */
{
struct cgraph_edge *e;
- verify_cgraph_node (id.dst_node);
+ id.dst_node->verify ();
/* Double check that we inlined everything we are supposed to inline. */
for (e = id.dst_node->callees; e; e = e->next_callee)
delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
- verify_cgraph_node (id.dst_node);
+ id.dst_node->verify ();
#endif
/* It would be nice to check SSA/CFG/statement consistency here, but it is
id->dst_node->remove_stmt_references (gsi_stmt (bsi));
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
- &&(e = cgraph_edge (id->dst_node, gsi_stmt (bsi))) != NULL)
+ &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
- cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
+ e->callee->remove_symbol_and_inline_clones (id->dst_node);
else
cgraph_remove_edge (e);
}
{
node->remove_stmt_references (gsi_stmt (bsi));
if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
- && (e = cgraph_edge (node, gsi_stmt (bsi))) != NULL)
+ && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
{
if (!e->inline_failed)
- cgraph_remove_node_and_inline_clones (e->callee, id->dst_node);
+ e->callee->remove_symbol_and_inline_clones (id->dst_node);
else
cgraph_remove_edge (e);
}
*/
void
tree_function_versioning (tree old_decl, tree new_decl,
- vec<ipa_replace_map_p, va_gc> *tree_map,
+ vec<ipa_replace_map *, va_gc> *tree_map,
bool update_clones, bitmap args_to_skip,
bool skip_return, bitmap blocks_to_copy,
basic_block new_entry)
&& TREE_CODE (new_decl) == FUNCTION_DECL);
DECL_POSSIBLY_INLINED (old_decl) = 1;
- old_version_node = cgraph_get_node (old_decl);
+ old_version_node = cgraph_node::get (old_decl);
gcc_checking_assert (old_version_node);
- new_version_node = cgraph_get_node (new_decl);
+ new_version_node = cgraph_node::get (new_decl);
gcc_checking_assert (new_version_node);
/* Copy over debug args. */
static bool
check_for_nested_with_variably_modified (tree fndecl, tree orig_fndecl)
{
- struct cgraph_node *cgn = cgraph_get_node (fndecl);
+ struct cgraph_node *cgn = cgraph_node::get (fndecl);
tree arg;
for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
static void
unnest_nesting_tree_1 (struct nesting_info *root)
{
- struct cgraph_node *node = cgraph_get_node (root->context);
+ struct cgraph_node *node = cgraph_node::get (root->context);
/* For nested functions update the cgraph to reflect unnesting.
We also delay finalizing of these functions up to this point. */
if (node->origin)
{
- cgraph_unnest_node (node);
+ node->unnest ();
cgraph_finalize_function (root->context, true);
}
}
struct nesting_info *root;
/* If there are no nested functions, there's nothing to do. */
- cgn = cgraph_get_node (fndecl);
+ cgn = cgraph_node::get (fndecl);
if (!cgn->nested)
return;
dump_function_header (FILE *dump_file, tree fdecl, int flags)
{
const char *dname, *aname;
- struct cgraph_node *node = cgraph_get_node (fdecl);
+ struct cgraph_node *node = cgraph_node::get (fdecl);
struct function *fun = DECL_STRUCT_FUNCTION (fdecl);
dname = lang_hooks.decl_printable_name (fdecl, 2);
void
gimple_gen_ic_func_profiler (void)
{
- struct cgraph_node * c_node = cgraph_get_node (current_function_decl);
+ struct cgraph_node * c_node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
gimple stmt1, stmt2;
tree tree_uid, cur_func, void0;
- if (cgraph_only_called_directly_p (c_node))
+ if (c_node->only_called_directly_p ())
return;
gimple_init_edge_profiler ();
true, NULL_TREE,
true, GSI_SAME_STMT);
tree_uid = build_int_cst
- (gcov_type_node, cgraph_get_node (current_function_decl)->profile_id);
+ (gcov_type_node, cgraph_node::get (current_function_decl)->profile_id);
/* Workaround for binutils bug 14342. Once it is fixed, remove lto path. */
if (flag_lto)
{
if (DECL_SOURCE_LOCATION (node->decl) == BUILTINS_LOCATION)
continue;
- cgraph_set_const_flag (node, false, false);
- cgraph_set_pure_flag (node, false, false);
+ node->set_const_flag (false, false);
+ node->set_pure_flag (false, false);
}
/* Update call statements and rebuild the cgraph. */
{
basic_block this_block;
- cgraph_for_node_and_aliases (node, convert_callers_for_node,
- &adjustments, false);
+ node->call_for_symbol_thunks_and_aliases (convert_callers_for_node,
+ &adjustments, false);
if (!encountered_recursive_call)
return;
/* This must be done after rebuilding cgraph edges for node above.
Otherwise any recursive calls to node that are recorded in
redirect_callers will be corrupted. */
- vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
- new_node = cgraph_function_versioning (node, redirect_callers,
- NULL,
- NULL, false, NULL, NULL, "isra");
+ vec<cgraph_edge *> redirect_callers = node->collect_callers ();
+ new_node = node->create_version_clone_with_body (redirect_callers, NULL,
+ NULL, false, NULL, NULL,
+ "isra");
redirect_callers.release ();
push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
cfg_changed = ipa_sra_modify_function_body (adjustments);
sra_ipa_reset_debug_stmts (adjustments);
convert_callers (new_node, node->decl, adjustments);
- cgraph_make_node_local (new_node);
+ new_node->make_local ();
return cfg_changed;
}
static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
- if (!cgraph_node_can_be_local_p (node))
+ if (!node->can_be_local_p ())
{
if (dump_file)
fprintf (dump_file, "Function not local to this compilation unit.\n");
return false;
}
- if (!cgraph_for_node_and_aliases (node, has_caller_p, NULL, true))
+ if (!node->call_for_symbol_thunks_and_aliases (has_caller_p, NULL, true))
{
if (dump_file)
fprintf (dump_file,
static unsigned int
ipa_early_sra (void)
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
ipa_parm_adjustment_vec adjustments;
int ret = 0;
goto simple_out;
}
- if (cgraph_for_node_and_aliases (node,
- some_callers_have_mismatched_arguments_p,
- NULL, true))
+ if (node->call_for_symbol_thunks_and_aliases
+ (some_callers_have_mismatched_arguments_p, NULL, true))
{
if (dump_file)
fprintf (dump_file, "There are callers with insufficient number of "
&& TREE_CODE (base) == VAR_DECL
&& TREE_STATIC (base))
{
- struct cgraph_node *node = cgraph_get_node (callee);
+ struct cgraph_node *node = cgraph_node::get (callee);
bitmap not_read;
/* FIXME: Callee can be an OMP builtin that does not have a call graph
&& TREE_CODE (base) == VAR_DECL
&& TREE_STATIC (base))
{
- struct cgraph_node *node = cgraph_get_node (callee);
+ struct cgraph_node *node = cgraph_node::get (callee);
bitmap not_written;
if (node
unsigned cost;
/* Avoid using hard regs in ways which may be unsupported. */
int regno = LAST_VIRTUAL_REGISTER + 1;
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
enum node_frequency real_frequency = node->frequency;
node->frequency = NODE_FREQUENCY_NORMAL;
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
"converting indirect call to "
"function %s\n",
- cgraph_get_node (fn)->name ());
+ cgraph_node::get (fn)->name ());
}
gimple_call_set_fndecl (stmt, fn);
gimple_set_modified (stmt, true);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- dump_symtab (dump_file);
+ symtab_node::dump_table (dump_file);
fprintf (dump_file, "\n");
}
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
- cgraph_get_body (node);
+ node->get_body ();
gcc_assert (!node->clone_of);
vi = create_function_info_for (node->decl,
alias_get_name (node->decl));
- cgraph_for_node_and_aliases (node, associate_varinfo_to_alias, vi, true);
+ node->call_for_symbol_thunks_and_aliases
+ (associate_varinfo_to_alias, vi, true);
}
/* Create constraints for global variables and their initializers. */
basic_block bb;
/* Nodes without a body are not interesting. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
if (dump_file)
basic_block bb;
/* Nodes without a body are not interesting. */
- if (!cgraph_function_with_gimple_body_p (node) || node->clone_of)
+ if (!node->has_gimple_body_p () || node->clone_of)
continue;
fn = DECL_STRUCT_FUNCTION (node->decl);
tree fndecl = gimple_call_fndecl (stmt), op;
if (fndecl != NULL_TREE)
{
- struct cgraph_node *node = cgraph_get_node (fndecl);
+ struct cgraph_node *node = cgraph_node::get (fndecl);
if (node != NULL && node->simd_clones != NULL)
{
unsigned int j, n = gimple_call_num_args (stmt);
/* When compiling partition, be sure the symbol is not output by other
partition. */
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (flag_ltrans
&& (snode->in_other_partition
- || symtab_get_symbol_partitioning_class (snode) == SYMBOL_DUPLICATE))
+ || snode->get_partitioning_class () == SYMBOL_DUPLICATE))
return false;
}
software projects. */
if (TREE_STATIC (decl)
&& DECL_SECTION_NAME (decl) != NULL
- && !symtab_get_node (decl)->implicit_section)
+ && !symtab_node::get (decl)->implicit_section)
return false;
/* If symbol is an alias, we need to check that target is OK. */
if (TREE_STATIC (decl))
{
- tree target = symtab_alias_ultimate_target (symtab_get_node (decl))->decl;
+ tree target = symtab_node::get (decl)->ultimate_alias_target ()->decl;
if (target != decl)
{
if (DECL_PRESERVE_P (target))
if (fndecl == NULL_TREE)
return false;
- struct cgraph_node *node = cgraph_get_node (fndecl);
+ struct cgraph_node *node = cgraph_node::get (fndecl);
if (node == NULL || node->simd_clones == NULL)
return false;
unsigned int badness = 0;
struct cgraph_node *bestn = NULL;
if (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info))
- bestn = cgraph_get_node (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info));
+ bestn = cgraph_node::get (STMT_VINFO_SIMD_CLONE_FNDECL (stmt_info));
else
for (struct cgraph_node *n = node->simd_clones; n != NULL;
n = n->simdclone->next_clone)
DECL_USER_ALIGN (decl) = 1;
if (TREE_STATIC (decl))
{
- tree target = symtab_alias_ultimate_target (symtab_get_node (decl))->decl;
+ tree target = symtab_node::get (decl)->ultimate_alias_target ()->decl;
DECL_ALIGN (target) = TYPE_ALIGN (vectype);
DECL_USER_ALIGN (target) = 1;
}
tree
decl_comdat_group (const_tree node)
{
- struct symtab_node *snode = symtab_get_node (node);
+ struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_comdat_group ();
tree
decl_comdat_group_id (const_tree node)
{
- struct symtab_node *snode = symtab_get_node (node);
+ struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_comdat_group_id ();
const char *
decl_section_name (const_tree node)
{
- struct symtab_node *snode = symtab_get_node (node);
+ struct symtab_node *snode = symtab_node::get (node);
if (!snode)
return NULL;
return snode->get_section ();
if (value == NULL)
{
- snode = symtab_get_node (node);
+ snode = symtab_node::get (node);
if (!snode)
return;
}
else if (TREE_CODE (node) == VAR_DECL)
snode = varpool_node_for_decl (node);
else
- snode = cgraph_get_create_node (node);
+ snode = cgraph_node::get_create (node);
snode->set_section (value);
}
return false;
/* Functions represented in the callgraph need an assembler name. */
- if (cgraph_get_node (decl) != NULL)
+ if (cgraph_node::get (decl) != NULL)
return true;
/* Unused and not public functions don't need an assembler name. */
if (TREE_CODE (decl) == FUNCTION_DECL)
{
struct cgraph_node *node;
- if (!(node = cgraph_get_node (decl))
+ if (!(node = cgraph_node::get (decl))
|| (!node->definition && !node->clones))
{
if (node)
- cgraph_release_function_body (node);
+ node->release_body ();
else
{
release_function_body (decl);
priority_type
decl_init_priority_lookup (tree decl)
{
- symtab_node *snode = symtab_get_node (decl);
+ symtab_node *snode = symtab_node::get (decl);
if (!snode)
return DEFAULT_INIT_PRIORITY;
priority_type
decl_fini_priority_lookup (tree decl)
{
- cgraph_node *node = cgraph_get_node (decl);
+ cgraph_node *node = cgraph_node::get (decl);
if (!node)
return DEFAULT_INIT_PRIORITY;
if (priority == DEFAULT_INIT_PRIORITY)
{
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (!snode)
return;
}
else if (TREE_CODE (decl) == VAR_DECL)
snode = varpool_node_for_decl (decl);
else
- snode = cgraph_get_create_node (decl);
+ snode = cgraph_node::get_create (decl);
snode->set_init_priority (priority);
}
if (priority == DEFAULT_INIT_PRIORITY)
{
- node = cgraph_get_node (decl);
+ node = cgraph_node::get (decl);
if (!node)
return;
}
else
- node = cgraph_get_create_node (decl);
+ node = cgraph_node::get_create (decl);
node->set_fini_priority (priority);
}
cgraph_node_map = pointer_map_create ();
FOR_EACH_DEFINED_FUNCTION (n)
- if (cgraph_function_with_gimple_body_p (n)
- && !cgraph_only_called_directly_p (n))
+ if (n->has_gimple_body_p ()
+ && !n->only_called_directly_p ())
{
void **val;
if (local)
{
targetm.asm_out.unique_section (decl, reloc);
if (DECL_SECTION_NAME (decl))
- symtab_for_node_and_aliases (symtab_get_node (decl),
- set_implicit_section, NULL, true);
+ symtab_node::get (decl)->call_for_symbol_and_aliases
+ (set_implicit_section, NULL, true);
}
}
buffer = ACONCAT ((stripped_name, named_section_suffix, NULL));
return get_named_section (decl, buffer, 0);
}
- else if (symtab_get_node (decl)->implicit_section)
+ else if (symtab_node::get (decl)->implicit_section)
{
const char *name;
/* Old GNU linkers have buggy --gc-section support, which sometimes
results in .gcc_except_table* sections being garbage collected. */
if (decl
- && symtab_get_node (decl)->implicit_section)
+ && symtab_node::get (decl)->implicit_section)
return NULL;
#endif
if (decl)
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
if (node)
{
{
addr_space_t as = ADDR_SPACE_GENERIC;
int reloc;
- symtab_node *snode = symtab_get_node (decl);
+ symtab_node *snode = symtab_node::get (decl);
if (snode)
- decl = symtab_alias_ultimate_target (snode)->decl;
+ decl = snode->ultimate_alias_target ()->decl;
if (TREE_TYPE (decl) != error_mark_node)
as = TYPE_ADDR_SPACE (TREE_TYPE (decl));
/* If this decl is an alias, then we don't want to emit a
definition. */
if (TREE_CODE (decl) == VAR_DECL
- && (snode = symtab_get_node (decl)) != NULL
+ && (snode = symtab_node::get (decl)) != NULL
&& snode->alias)
return false;
if (DECL_SECTION_NAME (decl))
{
- struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_node *node = cgraph_node::get (current_function_decl);
/* Calls to function_section rely on first_function_block_is_cold
being accurate. */
first_function_block_is_cold = (node
If we know a method will be emitted in other TU and no new
functions can be marked reachable, just use the external
definition. */
- struct cgraph_node *node = cgraph_get_create_node (decl);
+ struct cgraph_node *node = cgraph_node::get_create (decl);
if (!DECL_EXTERNAL (decl)
&& !node->definition)
- cgraph_mark_force_output_node (node);
+ node->mark_force_output ();
}
else if (TREE_CODE (decl) == VAR_DECL)
{
/* Allow aliases to aliases. */
if (TREE_CODE (decl) == FUNCTION_DECL)
- cgraph_get_create_node (decl)->alias = true;
+ cgraph_node::get_create (decl)->alias = true;
else
varpool_node_for_decl (decl)->alias = true;
{
tree src = p->from;
tree dst = p->to;
- struct cgraph_node *src_n = cgraph_get_node (src);
- struct cgraph_node *dst_n = cgraph_get_node (dst);
+ struct cgraph_node *src_n = cgraph_node::get (src);
+ struct cgraph_node *dst_n = cgraph_node::get (dst);
/* The function ipa_tm_create_version() marks the clone as needed if
the original function was needed. But we also mark the clone as
if (TREE_CODE (decl) == VAR_DECL)
symbol = varpool_node_for_decl (decl);
else
- symbol = cgraph_get_create_node (decl);
+ symbol = cgraph_node::get_create (decl);
if (SUPPORTS_ONE_ONLY)
{
}
else if (TREE_CODE (exp) == FUNCTION_DECL && TREE_PUBLIC (exp))
{
- struct cgraph_node *node = cgraph_get_node (exp);
+ struct cgraph_node *node = cgraph_node::get (exp);
if (node
&& (resolution_local_p (node->resolution) || node->in_other_partition))
resolved_locally = true;
}
else if (TREE_CODE (decl) == FUNCTION_DECL)
{
- struct cgraph_node *node = cgraph_get_node (decl);
+ struct cgraph_node *node = cgraph_node::get (decl);
if (node
&& node->resolution != LDPR_UNKNOWN)
return resolution_to_local_definition_p (node->resolution);
struct symtab_node *snode;
decl = SYMBOL_REF_DECL (symbol);
- snode = symtab_get_node (decl);
+ snode = symtab_node::get (decl);
if (snode->alias)
{
- rtx target = DECL_RTL (symtab_alias_ultimate_target (snode)->decl);
+ rtx target = DECL_RTL (snode->ultimate_alias_target ()->decl);
place_block_symbol (target);
SYMBOL_REF_BLOCK_OFFSET (symbol) = SYMBOL_REF_BLOCK_OFFSET (target);
node = varpool_create_empty_node ();
node->decl = decl;
- symtab_register_node (node);
+ node->register_symbol ();
return node;
}
-/* Remove node from the varpool. */
+/* Remove variable from symbol table. */
+
void
-varpool_remove_node (varpool_node *node)
+varpool_node::remove (void)
{
- varpool_call_node_removal_hooks (node);
- symtab_unregister_node (node);
+ varpool_call_node_removal_hooks (this);
+ unregister ();
/* When streaming we can have multiple nodes associated with decl. */
if (cgraph_state == CGRAPH_LTO_STREAMING)
;
/* Keep constructor when it may be used for folding. We remove
references to external variables before final compilation. */
- else if (DECL_INITIAL (node->decl) && DECL_INITIAL (node->decl) != error_mark_node
- && !varpool_ctor_useable_for_folding_p (node))
- varpool_remove_initializer (node);
- ggc_free (node);
+ else if (DECL_INITIAL (decl) && DECL_INITIAL (decl) != error_mark_node
+ && !varpool_ctor_useable_for_folding_p (this))
+ varpool_remove_initializer (this);
+ ggc_free (this);
}
/* Remove node initializer when it is no longer needed. */
/* Dump given cgraph node. */
void
-dump_varpool_node (FILE *f, varpool_node *node)
+varpool_node::dump (FILE *f)
{
- dump_symtab_base (f, node);
+ dump_base (f);
fprintf (f, " Availability: %s\n",
cgraph_function_flags_ready
- ? cgraph_availability_names[cgraph_variable_initializer_availability (node)]
+ ? cgraph_availability_names[cgraph_variable_initializer_availability (this)]
: "not-ready");
fprintf (f, " Varpool flags:");
- if (DECL_INITIAL (node->decl))
+ if (DECL_INITIAL (decl))
fprintf (f, " initialized");
- if (node->output)
+ if (output)
fprintf (f, " output");
- if (node->used_by_single_function)
+ if (used_by_single_function)
fprintf (f, " used-by-single-function");
- if (TREE_READONLY (node->decl))
+ if (TREE_READONLY (decl))
fprintf (f, " read-only");
- if (varpool_ctor_useable_for_folding_p (node))
+ if (varpool_ctor_useable_for_folding_p (this))
fprintf (f, " const-value-known");
- if (node->writeonly)
+ if (writeonly)
fprintf (f, " write-only");
- if (node->tls_model)
- fprintf (f, " %s", tls_model_names [node->tls_model]);
+ if (tls_model)
+ fprintf (f, " %s", tls_model_names [tls_model]);
fprintf (f, "\n");
}
-/* Dump the variable pool. */
+/* Dump the variable pool to F. */
void
dump_varpool (FILE *f)
{
fprintf (f, "variable pool:\n\n");
FOR_EACH_VARIABLE (node)
- dump_varpool_node (f, node);
+ node->dump (f);
}
/* Dump the variable pool to stderr. */
used to share template instantiations in C++. */
if (decl_replaceable_p (node->decl)
|| DECL_EXTERNAL (node->decl))
- return AVAIL_OVERWRITABLE;
+ return AVAIL_INTERPOSABLE;
return AVAIL_AVAILABLE;
}
align_variable (decl, 0);
}
if (node->alias)
- symtab_resolve_alias
- (node, varpool_get_node (node->alias_target));
+ node->resolve_alias (varpool_get_node (node->alias_target));
else if (DECL_INITIAL (decl))
record_references_in_initializer (decl, node->analyzed);
node->analyzed = true;
next = next->same_comdat_group)
{
varpool_node *vnext = dyn_cast <varpool_node *> (next);
- if (vnext && vnext->analyzed && !symtab_comdat_local_p (next))
+ if (vnext && vnext->analyzed && !next->comdat_local_p ())
enqueue_node (vnext, &first);
}
}
if (pointer_set_contains (referenced, node))
varpool_remove_initializer (node);
else
- varpool_remove_node (node);
+ node->remove ();
}
}
pointer_set_destroy (referenced);
This is unfortunate because they are not going through the
standard channels. Ensure they get output. */
if (cpp_implicit_aliases_done)
- symtab_resolve_alias (alias_node,
- varpool_node_for_decl (decl));
+ alias_node->resolve_alias (varpool_node_for_decl (decl));
return alias_node;
}
{
varpool_node *alias = dyn_cast <varpool_node *> (ref->referring);
if (include_overwritable
- || cgraph_variable_initializer_availability (alias) > AVAIL_OVERWRITABLE)
+ || cgraph_variable_initializer_availability (alias) > AVAIL_INTERPOSABLE)
if (varpool_for_node_and_aliases (alias, callback, data,
include_overwritable))
return true;