/* Callgraph based analysis of static variables.
- Copyright (C) 2004-2013 Free Software Foundation, Inc.
+ Copyright (C) 2004-2020 Free Software Foundation, Inc.
Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
-#include "tm.h"
+#include "backend.h"
#include "tree.h"
-#include "tree-inline.h"
+#include "gimple.h"
#include "tree-pass.h"
-#include "pointer-set.h"
-#include "splay-tree.h"
-#include "ggc.h"
+#include "cgraph.h"
+#include "data-streamer.h"
+#include "calls.h"
#include "ipa-utils.h"
#include "ipa-reference.h"
-#include "flags.h"
-#include "diagnostic.h"
-#include "data-streamer.h"
-#include "lto-streamer.h"
-
-static void remove_node_data (struct cgraph_node *node,
- void *data ATTRIBUTE_UNUSED);
-static void duplicate_node_data (struct cgraph_node *src,
- struct cgraph_node *dst,
- void *data ATTRIBUTE_UNUSED);
+#include "alloc-pool.h"
+#include "symbol-summary.h"
/* The static variables defined within the compilation unit that are
loaded or stored directly by function that owns this structure. */
struct ipa_reference_optimization_summary_d
{
- bitmap statics_not_read;
- bitmap statics_not_written;
+ bitmap statics_read;
+ bitmap statics_written;
};
-typedef struct ipa_reference_local_vars_info_d *ipa_reference_local_vars_info_t;
-typedef struct ipa_reference_global_vars_info_d *ipa_reference_global_vars_info_t;
-typedef struct ipa_reference_optimization_summary_d *ipa_reference_optimization_summary_t;
+typedef ipa_reference_local_vars_info_d *ipa_reference_local_vars_info_t;
+typedef ipa_reference_global_vars_info_d *ipa_reference_global_vars_info_t;
+typedef ipa_reference_optimization_summary_d *
+ ipa_reference_optimization_summary_t;
struct ipa_reference_vars_info_d
{
typedef struct ipa_reference_vars_info_d *ipa_reference_vars_info_t;
-/* This splay tree contains all of the static variables that are
+/* This map contains all of the static variables that are
being considered by the compilation level alias analysis. */
-static splay_tree reference_vars_to_consider;
+typedef hash_map<tree, int> reference_vars_map_t;
+static reference_vars_map_t *ipa_reference_vars_map;
+static int ipa_reference_vars_uids;
+static vec<tree> *reference_vars_to_consider;
+varpool_node_hook_list *varpool_node_hooks;
/* Set of all interesting module statics. A bit is set for every module
static we are considering. This is added to the local info when asm
code is found that clobbers all memory. */
static bitmap all_module_statics;
+/* Zero bitmap. */
+static bitmap no_module_statics;
+/* Set of all statics that should be ignored because they are touched by
+ -fno-ipa-reference code. */
+static bitmap ignore_module_statics;
/* Obstack holding bitmaps of local analysis (live from analysis to
propagation) */
/* Obstack holding global analysis live forever. */
static bitmap_obstack optimization_summary_obstack;
-/* Holders of ipa cgraph hooks: */
-static struct cgraph_2node_hook_list *node_duplication_hook_holder;
-static struct cgraph_node_hook_list *node_removal_hook_holder;
+class ipa_ref_var_info_summary_t: public fast_function_summary
+ <ipa_reference_vars_info_d *, va_heap>
+{
+public:
+ ipa_ref_var_info_summary_t (symbol_table *symtab):
+ fast_function_summary <ipa_reference_vars_info_d *, va_heap> (symtab) {}
+};
+
+static ipa_ref_var_info_summary_t *ipa_ref_var_info_summaries = NULL;
-/* Vector where the reference var infos are actually stored.
- Indexed by UID of call graph nodes. */
-static vec<ipa_reference_vars_info_t> ipa_reference_vars_vector;
+class ipa_ref_opt_summary_t: public fast_function_summary
+ <ipa_reference_optimization_summary_d *, va_heap>
+{
+public:
+ ipa_ref_opt_summary_t (symbol_table *symtab):
+ fast_function_summary <ipa_reference_optimization_summary_d *, va_heap> (symtab) {}
+
+ virtual void remove (cgraph_node *src_node,
+ ipa_reference_optimization_summary_d *data);
+ virtual void duplicate (cgraph_node *src_node, cgraph_node *dst_node,
+ ipa_reference_optimization_summary_d *src_data,
+ ipa_reference_optimization_summary_d *dst_data);
+};
-static vec<ipa_reference_optimization_summary_t> ipa_reference_opt_sum_vector;
+static ipa_ref_opt_summary_t *ipa_ref_opt_sum_summaries = NULL;
+
+/* Return ID used by ipa-reference bitmaps. -1 if failed. */
+int
+ipa_reference_var_uid (tree t)
+{
+ if (!ipa_reference_vars_map)
+ return -1;
+ int *id = ipa_reference_vars_map->get
+ (symtab_node::get (t)->ultimate_alias_target (NULL)->decl);
+ if (!id)
+ return -1;
+ return *id;
+}
+
+/* Return ID used by ipa-reference bitmaps. Create new entry if
+   T is not in map.  Set EXISTED accordingly.  */
+int
+ipa_reference_var_get_or_insert_uid (tree t, bool *existed)
+{
+ int &id = ipa_reference_vars_map->get_or_insert
+ (symtab_node::get (t)->ultimate_alias_target (NULL)->decl, existed);
+ if (!*existed)
+ id = ipa_reference_vars_uids++;
+ return id;
+}
/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
static inline ipa_reference_vars_info_t
get_reference_vars_info (struct cgraph_node *node)
{
- if (!ipa_reference_vars_vector.exists ()
- || ipa_reference_vars_vector.length () <= (unsigned int) node->uid)
+ if (ipa_ref_var_info_summaries == NULL)
return NULL;
- return ipa_reference_vars_vector[node->uid];
+
+ ipa_reference_vars_info_t v = ipa_ref_var_info_summaries->get (node);
+ return v == NULL ? NULL : v;
}
/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
static inline ipa_reference_optimization_summary_t
get_reference_optimization_summary (struct cgraph_node *node)
{
- if (!ipa_reference_opt_sum_vector.exists ()
- || (ipa_reference_opt_sum_vector.length () <= (unsigned int) node->uid))
+ if (ipa_ref_opt_sum_summaries == NULL)
return NULL;
- return ipa_reference_opt_sum_vector[node->uid];
-}
-/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
-static inline void
-set_reference_vars_info (struct cgraph_node *node,
- ipa_reference_vars_info_t info)
-{
- if (!ipa_reference_vars_vector.exists ()
- || ipa_reference_vars_vector.length () <= (unsigned int) node->uid)
- ipa_reference_vars_vector.safe_grow_cleared (node->uid + 1);
- ipa_reference_vars_vector[node->uid] = info;
-}
+ ipa_reference_optimization_summary_t v
+ = ipa_ref_opt_sum_summaries->get (node);
-/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
-static inline void
-set_reference_optimization_summary (struct cgraph_node *node,
- ipa_reference_optimization_summary_t info)
-{
- if (!ipa_reference_opt_sum_vector.exists ()
- || (ipa_reference_opt_sum_vector.length () <= (unsigned int) node->uid))
- ipa_reference_opt_sum_vector.safe_grow_cleared (node->uid + 1);
- ipa_reference_opt_sum_vector[node->uid] = info;
+ return v == NULL ? NULL : v;
}
-/* Return a bitmap indexed by DECL_UID for the static variables that
- are *not* read during the execution of the function FN. Returns
+/* Return a bitmap indexed by ipa_reference_var_uid for the static variables
+ that are *not* read during the execution of the function FN. Returns
NULL if no data is available. */
bitmap
-ipa_reference_get_not_read_global (struct cgraph_node *fn)
+ipa_reference_get_read_global (struct cgraph_node *fn)
{
+ if (!opt_for_fn (current_function_decl, flag_ipa_reference))
+ return NULL;
+
+ enum availability avail;
+ struct cgraph_node *fn2 = fn->function_symbol (&avail);
ipa_reference_optimization_summary_t info =
- get_reference_optimization_summary (cgraph_function_node (fn, NULL));
- if (info)
- return info->statics_not_read;
- else if (flags_from_decl_or_type (fn->symbol.decl) & ECF_LEAF)
- return all_module_statics;
+ get_reference_optimization_summary (fn2);
+
+ if (info
+ && (avail >= AVAIL_AVAILABLE
+ || (avail == AVAIL_INTERPOSABLE
+ && flags_from_decl_or_type (fn->decl) & ECF_LEAF))
+ && opt_for_fn (fn2->decl, flag_ipa_reference))
+ return info->statics_read;
+ else if (avail == AVAIL_NOT_AVAILABLE
+ && flags_from_decl_or_type (fn->decl) & ECF_LEAF)
+ return no_module_statics;
else
return NULL;
}
-/* Return a bitmap indexed by DECL_UID for the static variables that
- are *not* written during the execution of the function FN. Note
+/* Return a bitmap indexed by ipa_reference_var_uid for the static variables
+ that are *not* written during the execution of the function FN. Note
that variables written may or may not be read during the function
call. Returns NULL if no data is available. */
bitmap
-ipa_reference_get_not_written_global (struct cgraph_node *fn)
+ipa_reference_get_written_global (struct cgraph_node *fn)
{
+ if (!opt_for_fn (current_function_decl, flag_ipa_reference))
+ return NULL;
+
+ enum availability avail;
+ struct cgraph_node *fn2 = fn->function_symbol (&avail);
ipa_reference_optimization_summary_t info =
- get_reference_optimization_summary (fn);
- if (info)
- return info->statics_not_written;
- else if (flags_from_decl_or_type (fn->symbol.decl) & ECF_LEAF)
- return all_module_statics;
+ get_reference_optimization_summary (fn2);
+
+ if (info
+ && (avail >= AVAIL_AVAILABLE
+ || (avail == AVAIL_INTERPOSABLE
+ && flags_from_decl_or_type (fn->decl) & ECF_LEAF))
+ && opt_for_fn (fn2->decl, flag_ipa_reference))
+ return info->statics_written;
+ else if (avail == AVAIL_NOT_AVAILABLE
+ && flags_from_decl_or_type (fn->decl) & ECF_LEAF)
+ return no_module_statics;
else
return NULL;
}
-
\f
-/* Add VAR to all_module_statics and the two
- reference_vars_to_consider* sets. */
-
-static inline void
-add_static_var (tree var)
-{
- int uid = DECL_UID (var);
- gcc_assert (TREE_CODE (var) == VAR_DECL);
- if (dump_file)
- splay_tree_insert (reference_vars_to_consider,
- uid, (splay_tree_value)var);
- bitmap_set_bit (all_module_statics, uid);
-}
-
-/* Return true if the variable T is the right kind of static variable to
- perform compilation unit scope escape analysis. */
-
-static inline bool
-is_proper_for_analysis (tree t)
+/* Helper for is_proper_for_analysis.  */
+static bool
+is_improper (symtab_node *n, void *v ATTRIBUTE_UNUSED)
{
+ tree t = n->decl;
/* If the variable has the "used" attribute, treat it as if it had a
been touched by the devil. */
if (DECL_PRESERVE_P (t))
- return false;
+ return true;
/* Do not want to do anything with volatile except mark any
function that uses one to be not const or pure. */
if (TREE_THIS_VOLATILE (t))
- return false;
+ return true;
/* We do not need to analyze readonly vars, we already know they do not
alias. */
if (TREE_READONLY (t))
+ return true;
+
+ /* We cannot track variables with address taken. */
+ if (TREE_ADDRESSABLE (t))
+ return true;
+
+ /* TODO: We could track public variables that are not addressable, but
+ currently frontends don't give us those. */
+ if (TREE_PUBLIC (t))
+ return true;
+
+ return false;
+}
+
+/* Return true if the variable T is the right kind of static variable to
+ perform compilation unit scope escape analysis. */
+
+static inline bool
+is_proper_for_analysis (tree t)
+{
+ int id = ipa_reference_var_uid (t);
+
+ if (id != -1 && bitmap_bit_p (ignore_module_statics, id))
return false;
- /* This is a variable we care about. Check if we have seen it
- before, and if not add it the set of variables we care about. */
- if (all_module_statics
- && !bitmap_bit_p (all_module_statics, DECL_UID (t)))
- add_static_var (t);
+ if (symtab_node::get (t)
+ ->call_for_symbol_and_aliases (is_improper, NULL, true))
+ return false;
return true;
}
static const char *
get_static_name (int index)
{
- splay_tree_node stn =
- splay_tree_lookup (reference_vars_to_consider, index);
- return fndecl_name ((tree)(stn->value));
+ return fndecl_name ((*reference_vars_to_consider)[index]);
}
/* Dump a set of static vars to FILE. */
return;
else if (set == all_module_statics)
fprintf (f, "ALL");
+ else if (set == no_module_statics)
+ fprintf (f, "NO");
else
EXECUTE_IF_SET_IN_BITMAP (set, 0, index, bi)
{
return x == all_module_statics;
}
-/* Compute X &= Y, taking into account the possibility that
- X may become the maximum set. */
-
-static bool
-intersect_static_var_sets (bitmap &x, bitmap y)
-{
- if (x != all_module_statics)
- {
- bitmap_and_into (x, y);
- /* As with union_static_var_sets, reducing to the maximum
- set as early as possible is an overall win. */
- if (bitmap_equal_p (x, all_module_statics))
- {
- BITMAP_FREE (x);
- x = all_module_statics;
- }
- }
- return x == all_module_statics;
-}
-
/* Return a copy of SET on the bitmap obstack containing SET.
But if SET is NULL or the maximum set, return that instead. */
static bitmap
-copy_static_var_set (bitmap set)
+copy_static_var_set (bitmap set, bool for_propagation)
{
if (set == NULL || set == all_module_statics)
return set;
+ if (!for_propagation && set == no_module_statics)
+ return set;
bitmap_obstack *o = set->obstack;
gcc_checking_assert (o);
bitmap copy = BITMAP_ALLOC (o);
e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *y = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *y = e->callee->function_symbol (&avail);
if (!y)
continue;
/* Only look into nodes we can propagate something. */
- int flags = flags_from_decl_or_type (y->symbol.decl);
- if (avail > AVAIL_OVERWRITABLE
- || (avail == AVAIL_OVERWRITABLE && (flags & ECF_LEAF)))
+ int flags = flags_from_decl_or_type (y->decl);
+ if (opt_for_fn (y->decl, flag_ipa_reference)
+ && (avail > AVAIL_INTERPOSABLE
+ || (avail == AVAIL_INTERPOSABLE && (flags & ECF_LEAF))))
{
if (get_reference_vars_info (y))
{
seems so to local analysis. If we cannot return from
the function, we can safely ignore the call. */
if ((flags & ECF_PURE)
- || cgraph_edge_cannot_lead_to_return (e))
+ || e->cannot_lead_to_return_p ())
continue;
union_static_var_sets (x_global->statics_written,
}
}
+/* Delete NODE from map. */
+
+static void
+varpool_removal_hook (varpool_node *node, void *)
+{
+ ipa_reference_vars_map->remove (node->decl);
+}
+
+static bool ipa_init_p = false;
+
/* The init routine for analyzing global static variable usage. See
comments at top for description. */
static void
ipa_init (void)
{
- static bool init_p = false;
-
- if (init_p)
+ if (ipa_init_p)
return;
- init_p = true;
+ ipa_init_p = true;
- if (dump_file)
- reference_vars_to_consider = splay_tree_new (splay_tree_compare_ints, 0, 0);
+ vec_alloc (reference_vars_to_consider, 10);
+
+
+ if (ipa_ref_opt_sum_summaries != NULL)
+ {
+ delete ipa_ref_opt_sum_summaries;
+ ipa_ref_opt_sum_summaries = NULL;
+ delete ipa_reference_vars_map;
+ }
+ ipa_reference_vars_map = new reference_vars_map_t(257);
+ varpool_node_hooks
+ = symtab->add_varpool_removal_hook (varpool_removal_hook, NULL);
+ ipa_reference_vars_uids = 0;
bitmap_obstack_initialize (&local_info_obstack);
bitmap_obstack_initialize (&optimization_summary_obstack);
all_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
+ no_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
+ ignore_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
- node_removal_hook_holder =
- cgraph_add_node_removal_hook (&remove_node_data, NULL);
- node_duplication_hook_holder =
- cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
+ if (ipa_ref_var_info_summaries == NULL)
+ ipa_ref_var_info_summaries = new ipa_ref_var_info_summary_t (symtab);
}
init_function_info (struct cgraph_node *fn)
{
ipa_reference_vars_info_t info
- = XCNEW (struct ipa_reference_vars_info_d);
-
- /* Add the info to the tree's annotation. */
- set_reference_vars_info (fn, info);
+ = ipa_ref_var_info_summaries->get_create (fn);
info->local.statics_read = BITMAP_ALLOC (&local_info_obstack);
info->local.statics_written = BITMAP_ALLOC (&local_info_obstack);
+ info->global.statics_read = NULL;
return &info->local;
}
analyze_function (struct cgraph_node *fn)
{
ipa_reference_local_vars_info_t local;
- struct ipa_ref *ref;
+ struct ipa_ref *ref = NULL;
int i;
tree var;
+ if (!opt_for_fn (fn->decl, flag_ipa_reference))
+ return;
local = init_function_info (fn);
- for (i = 0; ipa_ref_list_reference_iterate (&fn->symbol.ref_list, i, ref); i++)
+ for (i = 0; fn->iterate_reference (i, ref); i++)
{
- if (!is_a <varpool_node> (ref->referred))
+ int id;
+ bool existed;
+ if (!is_a <varpool_node *> (ref->referred))
continue;
- var = ipa_ref_varpool_node (ref)->symbol.decl;
+ var = ref->referred->decl;
if (!is_proper_for_analysis (var))
continue;
+      /* This is a variable we care about.  Check if we have seen it
+	 before, and if not add it to the set of variables we care about.  */
+ id = ipa_reference_var_get_or_insert_uid (var, &existed);
+ if (!existed)
+ {
+ bitmap_set_bit (all_module_statics, id);
+ if (dump_file)
+ reference_vars_to_consider->safe_push (var);
+ }
switch (ref->use)
{
case IPA_REF_LOAD:
- bitmap_set_bit (local->statics_read, DECL_UID (var));
+ bitmap_set_bit (local->statics_read, id);
break;
case IPA_REF_STORE:
- if (ipa_ref_cannot_lead_to_return (ref))
+ if (ref->cannot_lead_to_return ())
break;
- bitmap_set_bit (local->statics_written, DECL_UID (var));
+ bitmap_set_bit (local->statics_written, id);
break;
case IPA_REF_ADDR:
break;
+ default:
+ gcc_unreachable ();
}
}
- if (cgraph_node_cannot_return (fn))
+ if (fn->cannot_return_p ())
bitmap_clear (local->statics_written);
}
/* Called when new clone is inserted to callgraph late. */
-static void
-duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
- void *data ATTRIBUTE_UNUSED)
+void
+ipa_ref_opt_summary_t::duplicate (cgraph_node *, cgraph_node *,
+ ipa_reference_optimization_summary_d *ginfo,
+ ipa_reference_optimization_summary_d
+ *dst_ginfo)
{
- ipa_reference_optimization_summary_t ginfo;
- ipa_reference_optimization_summary_t dst_ginfo;
-
- ginfo = get_reference_optimization_summary (src);
- if (!ginfo)
- return;
- dst_ginfo = XCNEW (struct ipa_reference_optimization_summary_d);
- set_reference_optimization_summary (dst, dst_ginfo);
- dst_ginfo->statics_not_read =
- copy_static_var_set (ginfo->statics_not_read);
- dst_ginfo->statics_not_written =
- copy_static_var_set (ginfo->statics_not_written);
+ dst_ginfo->statics_read =
+ copy_static_var_set (ginfo->statics_read, false);
+ dst_ginfo->statics_written =
+ copy_static_var_set (ginfo->statics_written, false);
}
/* Called when node is removed. */
-static void
-remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
+void
+ipa_ref_opt_summary_t::remove (cgraph_node *,
+ ipa_reference_optimization_summary_d *ginfo)
{
- ipa_reference_optimization_summary_t ginfo;
- ginfo = get_reference_optimization_summary (node);
- if (ginfo)
- {
- if (ginfo->statics_not_read
- && ginfo->statics_not_read != all_module_statics)
- BITMAP_FREE (ginfo->statics_not_read);
-
- if (ginfo->statics_not_written
- && ginfo->statics_not_written != all_module_statics)
- BITMAP_FREE (ginfo->statics_not_written);
- free (ginfo);
- set_reference_optimization_summary (node, NULL);
- }
+ if (ginfo->statics_read
+ && ginfo->statics_read != all_module_statics
+ && ginfo->statics_read != no_module_statics)
+ BITMAP_FREE (ginfo->statics_read);
+
+ if (ginfo->statics_written
+ && ginfo->statics_written != all_module_statics
+ && ginfo->statics_written != no_module_statics)
+ BITMAP_FREE (ginfo->statics_written);
}
/* Analyze each function in the cgraph to see which global or statics
ipa_init ();
/* Process all of the functions next. */
+ FOR_EACH_DEFINED_FUNCTION (node)
+ if (!node->alias && !opt_for_fn (node->decl, flag_ipa_reference))
+ {
+ struct ipa_ref *ref = NULL;
+ int i;
+ tree var;
+ for (i = 0; node->iterate_reference (i, ref); i++)
+ {
+ if (!is_a <varpool_node *> (ref->referred))
+ continue;
+ var = ref->referred->decl;
+ if (!is_proper_for_analysis (var))
+ continue;
+ bitmap_set_bit (ignore_module_statics, ipa_reference_var_uid (var));
+ }
+ }
FOR_EACH_DEFINED_FUNCTION (node)
analyze_function (node);
if (dump_file)
FOR_EACH_DEFINED_FUNCTION (node)
- if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
+ if (node->get_availability () >= AVAIL_INTERPOSABLE
+ && opt_for_fn (node->decl, flag_ipa_reference))
{
ipa_reference_local_vars_info_t l;
unsigned int index;
l = &get_reference_vars_info (node)->local;
fprintf (dump_file,
- "\nFunction name:%s/%i:",
- cgraph_node_asm_name (node), node->symbol.order);
+ "\nFunction name:%s:", node->dump_name ());
fprintf (dump_file, "\n locals read: ");
if (l->statics_read)
EXECUTE_IF_SET_IN_BITMAP (l->statics_read,
read_write_all_from_decl (struct cgraph_node *node,
bool &read_all, bool &write_all)
{
- tree decl = node->symbol.decl;
+ tree decl = node->decl;
int flags = flags_from_decl_or_type (decl);
if ((flags & ECF_LEAF)
- && cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ && node->get_availability () < AVAIL_INTERPOSABLE)
;
else if (flags & ECF_CONST)
;
- else if ((flags & ECF_PURE)
- || cgraph_node_cannot_return (node))
+ else if ((flags & ECF_PURE) || node->cannot_return_p ())
{
read_all = true;
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " %s/%i -> read all\n",
- cgraph_node_asm_name (node), node->symbol.order);
+ fprintf (dump_file, " %s -> read all\n", node->dump_name ());
}
else
{
read_all = true;
write_all = true;
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " %s/%i -> read all, write all\n",
- cgraph_node_asm_name (node), node->symbol.order);
+ fprintf (dump_file, " %s -> read all, write all\n",
+ node->dump_name ());
}
}
{
struct cgraph_edge *e, *ie;
- /* When function is overwritable, we can not assume anything. */
- if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
+ /* When function is overwritable, we cannot assume anything. */
+ if (node->get_availability () <= AVAIL_INTERPOSABLE
+ || (node->analyzed && !opt_for_fn (node->decl, flag_ipa_reference)))
read_write_all_from_decl (node, read_all, write_all);
for (e = node->callees;
e = e->next_callee)
{
enum availability avail;
- struct cgraph_node *callee = cgraph_function_node (e->callee, &avail);
+ struct cgraph_node *callee = e->callee->function_symbol (&avail);
gcc_checking_assert (callee);
- if (avail <= AVAIL_OVERWRITABLE)
+ if (avail <= AVAIL_INTERPOSABLE
+ || (callee->analyzed && !opt_for_fn (callee->decl,
+ flag_ipa_reference)))
read_write_all_from_decl (callee, read_all, write_all);
}
read_all = true;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " indirect call -> read all\n");
- if (!cgraph_edge_cannot_lead_to_return (ie)
+ if (!ie->cannot_lead_to_return_p ()
&& !(ie->indirect_info->ecf_flags & ECF_PURE))
{
if (dump_file && (dump_flags & TDF_DETAILS))
}
}
+/* Skip edges from and to nodes without ipa_reference enabled.
+   Ignore not available symbols.  This leaves
+   them out of strongly connected components and makes them easy to skip in the
+   propagation loop below.  */
+
+static bool
+ignore_edge_p (cgraph_edge *e)
+{
+ enum availability avail;
+ cgraph_node *ultimate_target
+ = e->callee->function_or_virtual_thunk_symbol (&avail, e->caller);
+
+ return (avail < AVAIL_INTERPOSABLE
+ || (avail == AVAIL_INTERPOSABLE
+ && !(flags_from_decl_or_type (e->callee->decl) & ECF_LEAF))
+ || !opt_for_fn (e->caller->decl, flag_ipa_reference)
+ || !opt_for_fn (ultimate_target->decl, flag_ipa_reference));
+}
+
/* Produce the global information by preforming a transitive closure
on the local information that was produced by ipa_analyze_function. */
propagate (void)
{
struct cgraph_node *node;
- struct varpool_node *vnode;
struct cgraph_node **order =
- XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
+ XCNEWVEC (struct cgraph_node *, symtab->cgraph_count);
int order_pos;
int i;
+ bool remove_p;
if (dump_file)
- dump_cgraph (dump_file);
+ cgraph_node::dump_cgraph (dump_file);
- ipa_discover_readonly_nonaddressable_vars ();
+ remove_p = ipa_discover_variable_flags ();
generate_summary ();
- /* Now we know what vars are really statics; prune out those that aren't. */
- FOR_EACH_VARIABLE (vnode)
- if (vnode->symbol.externally_visible
- || TREE_ADDRESSABLE (vnode->symbol.decl)
- || TREE_READONLY (vnode->symbol.decl)
- || !is_proper_for_analysis (vnode->symbol.decl)
- || !vnode->symbol.definition)
- bitmap_clear_bit (all_module_statics, DECL_UID (vnode->symbol.decl));
-
- /* Forget info we collected "just for fun" on variables that turned out to be
- non-local. */
- FOR_EACH_DEFINED_FUNCTION (node)
- {
- ipa_reference_local_vars_info_t node_l;
- node_l = &get_reference_vars_info (node)->local;
- intersect_static_var_sets (node_l->statics_read, all_module_statics);
- intersect_static_var_sets (node_l->statics_written, all_module_statics);
- }
-
/* Propagate the local information through the call graph to produce
the global information. All the nodes within a cycle will have
the same info so we collapse cycles first. Then we can do the
propagation in one pass from the leaves to the roots. */
- order_pos = ipa_reduced_postorder (order, true, true, NULL);
+ order_pos = ipa_reduced_postorder (order, true, ignore_edge_p);
if (dump_file)
ipa_print_order (dump_file, "reduced", order, order_pos);
bool write_all = false;
node = order[i];
- if (node->symbol.alias)
+ if (node->alias || !opt_for_fn (node->decl, flag_ipa_reference))
continue;
node_info = get_reference_vars_info (node);
node_g = &node_info->global;
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, "Starting cycle with %s/%i\n",
- cgraph_node_asm_name (node), node->symbol.order);
+ fprintf (dump_file, "Starting cycle with %s\n", node->dump_name ());
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* If any node in a cycle is read_all or write_all, they all are. */
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
if (dump_file && (dump_flags & TDF_DETAILS))
- fprintf (dump_file, " Visiting %s/%i\n",
- cgraph_node_asm_name (w), w->symbol.order);
+ fprintf (dump_file, " Visiting %s\n", w->dump_asm_name ());
get_read_write_all_from_node (w, read_all, write_all);
if (read_all && write_all)
break;
if (read_all)
node_g->statics_read = all_module_statics;
else
- node_g->statics_read = copy_static_var_set (node_l->statics_read);
+ node_g->statics_read = copy_static_var_set (node_l->statics_read, true);
if (write_all)
node_g->statics_written = all_module_statics;
else
- node_g->statics_written = copy_static_var_set (node_l->statics_written);
+ node_g->statics_written
+ = copy_static_var_set (node_l->statics_written, true);
/* Merge the sets of this cycle with all sets of callees reached
from this cycle. */
{
ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
ipa_reference_local_vars_info_t w_l = &w_ri->local;
- int flags = flags_from_decl_or_type (w->symbol.decl);
+ int flags = flags_from_decl_or_type (w->decl);
if (!(flags & ECF_CONST))
read_all = union_static_var_sets (node_g->statics_read,
w_l->statics_read);
if (!(flags & ECF_PURE)
- && !cgraph_node_cannot_return (w))
+ && !w->cannot_return_p ())
write_all = union_static_var_sets (node_g->statics_written,
w_l->statics_written);
}
struct cgraph_node *w;
node = order[i];
- if (node->symbol.alias)
+ if (node->alias || !opt_for_fn (node->decl, flag_ipa_reference))
continue;
- fprintf (dump_file,
- "\nFunction name:%s/%i:",
- cgraph_node_asm_name (node), node->symbol.order);
+ fprintf (dump_file, "\nFunction name:%s:", node->dump_asm_name ());
ipa_reference_vars_info_t node_info = get_reference_vars_info (node);
ipa_reference_global_vars_info_t node_g = &node_info->global;
- vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node *> cycle_nodes = ipa_get_nodes_in_cycle (node);
FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
ipa_reference_local_vars_info_t w_l = &w_ri->local;
if (w != node)
- fprintf (dump_file, "\n next cycle: %s/%i ",
- cgraph_node_asm_name (w), w->symbol.order);
+ fprintf (dump_file, "\n next cycle: %s ", w->dump_asm_name ());
fprintf (dump_file, "\n locals read: ");
dump_static_vars_set_to_file (dump_file, w_l->statics_read);
fprintf (dump_file, "\n locals written: ");
}
}
+ if (ipa_ref_opt_sum_summaries == NULL)
+ ipa_ref_opt_sum_summaries = new ipa_ref_opt_summary_t (symtab);
+
/* Cleanup. */
FOR_EACH_DEFINED_FUNCTION (node)
{
ipa_reference_vars_info_t node_info;
ipa_reference_global_vars_info_t node_g;
- ipa_reference_optimization_summary_t opt;
+
+ /* No need to produce summaries for inline clones. */
+ if (node->inlined_to)
+ continue;
node_info = get_reference_vars_info (node);
- if (!node->symbol.alias
- && (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE
- || (flags_from_decl_or_type (node->symbol.decl) & ECF_LEAF)))
+ if (!node->alias && opt_for_fn (node->decl, flag_ipa_reference))
{
node_g = &node_info->global;
+ bool read_all =
+ (node_g->statics_read == all_module_statics
+ || bitmap_equal_p (node_g->statics_read, all_module_statics));
+ bool written_all =
+ (node_g->statics_written == all_module_statics
+ || bitmap_equal_p (node_g->statics_written,
+ all_module_statics));
+
+ /* There is no need to produce summary if we collected nothing
+ useful. */
+ if (read_all && written_all)
+ continue;
- opt = XCNEW (struct ipa_reference_optimization_summary_d);
- set_reference_optimization_summary (node, opt);
+ ipa_reference_optimization_summary_d *opt
+ = ipa_ref_opt_sum_summaries->get_create (node);
/* Create the complimentary sets. */
if (bitmap_empty_p (node_g->statics_read))
- opt->statics_not_read = all_module_statics;
+ opt->statics_read = no_module_statics;
+ else if (read_all)
+ opt->statics_read = all_module_statics;
else
{
- opt->statics_not_read
+ opt->statics_read
= BITMAP_ALLOC (&optimization_summary_obstack);
- if (node_g->statics_read != all_module_statics)
- bitmap_and_compl (opt->statics_not_read,
- all_module_statics,
- node_g->statics_read);
+ bitmap_copy (opt->statics_read, node_g->statics_read);
}
if (bitmap_empty_p (node_g->statics_written))
- opt->statics_not_written = all_module_statics;
+ opt->statics_written = no_module_statics;
+ else if (written_all)
+ opt->statics_written = all_module_statics;
else
{
- opt->statics_not_written
+ opt->statics_written
= BITMAP_ALLOC (&optimization_summary_obstack);
- if (node_g->statics_written != all_module_statics)
- bitmap_and_compl (opt->statics_not_written,
- all_module_statics,
- node_g->statics_written);
+ bitmap_copy (opt->statics_written, node_g->statics_written);
}
}
- free (node_info);
}
ipa_free_postorder_info ();
free (order);
bitmap_obstack_release (&local_info_obstack);
- ipa_reference_vars_vector.release ();
+
+ if (ipa_ref_var_info_summaries != NULL)
+ {
+ delete ipa_ref_var_info_summaries;
+ ipa_ref_var_info_summaries = NULL;
+ }
+
if (dump_file)
- splay_tree_delete (reference_vars_to_consider);
+ vec_free (reference_vars_to_consider);
reference_vars_to_consider = NULL;
- return 0;
+ return remove_p ? TODO_remove_functions : 0;
}
/* Return true if we need to write summary of NODE. */
ipa_reference_optimization_summary_t info;
/* See if we have (non-empty) info. */
- if (!node->symbol.definition || node->global.inlined_to)
+ if (!node->definition || node->inlined_to)
return false;
info = get_reference_optimization_summary (node);
- if (!info || (bitmap_empty_p (info->statics_not_read)
- && bitmap_empty_p (info->statics_not_written)))
+ if (!info)
return false;
/* See if we want to encode it.
In future we might also want to include summaries of functions references
by initializers of constant variables references in current unit. */
if (!reachable_from_this_partition_p (node, encoder)
- && !referenced_from_this_partition_p (&node->symbol.ref_list, encoder))
+ && !referenced_from_this_partition_p (node, encoder))
return false;
- /* See if the info has non-empty intersections with vars we want to encode. */
- if (!bitmap_intersect_p (info->statics_not_read, ltrans_statics)
- && !bitmap_intersect_p (info->statics_not_written, ltrans_statics))
- return false;
- return true;
+ /* See if the info has non-empty intersections with vars we want to
+ encode. */
+ bitmap_iterator bi;
+ unsigned int i;
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (ltrans_statics, info->statics_read, 0,
+ i, bi)
+ return true;
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (ltrans_statics, info->statics_written, 0,
+ i, bi)
+ return true;
+ return false;
}
/* Stream out BITS<RANS_STATICS as list of decls to OB.
return;
EXECUTE_IF_AND_IN_BITMAP (bits, ltrans_statics, 0, index, bi)
{
- tree decl = (tree)splay_tree_lookup (reference_vars_to_consider, index)->value;
+ tree decl = (*reference_vars_to_consider) [index];
lto_output_var_decl_index (ob->decl_state, ob->main_stream, decl);
}
}
unsigned int count = 0;
int ltrans_statics_bitcount = 0;
lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
- bitmap ltrans_statics = BITMAP_ALLOC (NULL);
+ auto_bitmap ltrans_statics;
int i;
- reference_vars_to_consider = splay_tree_new (splay_tree_compare_ints, 0, 0);
+ vec_alloc (reference_vars_to_consider, ipa_reference_vars_uids);
+ reference_vars_to_consider->safe_grow (ipa_reference_vars_uids);
/* See what variables we are interested in. */
for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
{
- symtab_node snode = lto_symtab_encoder_deref (encoder, i);
- varpool_node *vnode = dyn_cast <varpool_node> (snode);
+ symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
+ varpool_node *vnode = dyn_cast <varpool_node *> (snode);
+ int id;
+
if (vnode
- && bitmap_bit_p (all_module_statics, DECL_UID (vnode->symbol.decl))
- && referenced_from_this_partition_p (&vnode->symbol.ref_list, encoder))
+ && (id = ipa_reference_var_uid (vnode->decl)) != -1
+ && referenced_from_this_partition_p (vnode, encoder))
{
- tree decl = vnode->symbol.decl;
- bitmap_set_bit (ltrans_statics, DECL_UID (decl));
- splay_tree_insert (reference_vars_to_consider,
- DECL_UID (decl), (splay_tree_value)decl);
+ tree decl = vnode->decl;
+ bitmap_set_bit (ltrans_statics, id);
+ (*reference_vars_to_consider)[id] = decl;
ltrans_statics_bitcount ++;
}
}
if (ltrans_statics_bitcount)
for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
{
- symtab_node snode = lto_symtab_encoder_deref (encoder, i);
- cgraph_node *cnode = dyn_cast <cgraph_node> (snode);
+ symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
+ cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
if (cnode && write_node_summary_p (cnode, encoder, ltrans_statics))
count++;
}
if (ltrans_statics_bitcount)
for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
{
- symtab_node snode = lto_symtab_encoder_deref (encoder, i);
- cgraph_node *cnode = dyn_cast <cgraph_node> (snode);
+ symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
+ cgraph_node *cnode = dyn_cast <cgraph_node *> (snode);
if (cnode && write_node_summary_p (cnode, encoder, ltrans_statics))
{
ipa_reference_optimization_summary_t info;
node_ref = lto_symtab_encoder_encode (encoder, snode);
streamer_write_uhwi_stream (ob->main_stream, node_ref);
- stream_out_bitmap (ob, info->statics_not_read, ltrans_statics,
+ stream_out_bitmap (ob, info->statics_read, ltrans_statics,
ltrans_statics_bitcount);
- stream_out_bitmap (ob, info->statics_not_written, ltrans_statics,
+ stream_out_bitmap (ob, info->statics_written, ltrans_statics,
ltrans_statics_bitcount);
}
}
- BITMAP_FREE (ltrans_statics);
lto_destroy_simple_output_block (ob);
- splay_tree_delete (reference_vars_to_consider);
+ delete reference_vars_to_consider;
}
/* Deserialize the ipa info for lto. */
unsigned int j = 0;
bitmap_obstack_initialize (&optimization_summary_obstack);
- node_removal_hook_holder =
- cgraph_add_node_removal_hook (&remove_node_data, NULL);
- node_duplication_hook_holder =
- cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
+ gcc_checking_assert (ipa_ref_opt_sum_summaries == NULL);
+ ipa_ref_opt_sum_summaries = new ipa_ref_opt_summary_t (symtab);
+ ipa_reference_vars_map = new reference_vars_map_t(257);
+ varpool_node_hooks
+ = symtab->add_varpool_removal_hook (varpool_removal_hook, NULL);
+ ipa_reference_vars_uids = 0;
+
all_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
+ no_module_statics = BITMAP_ALLOC (&optimization_summary_obstack);
while ((file_data = file_data_vec[j++]))
{
const char *data;
size_t len;
- struct lto_input_block *ib
+ class lto_input_block *ib
= lto_create_simple_input_block (file_data,
LTO_section_ipa_reference,
&data, &len);
unsigned int var_index = streamer_read_uhwi (ib);
tree v_decl = lto_file_decl_data_get_var_decl (file_data,
var_index);
- bitmap_set_bit (all_module_statics, DECL_UID (v_decl));
+ bool existed;
+ bitmap_set_bit (all_module_statics,
+ ipa_reference_var_get_or_insert_uid
+ (v_decl, &existed));
+ gcc_checking_assert (!existed);
if (dump_file)
fprintf (dump_file, " %s", fndecl_name (v_decl));
}
{
unsigned int j, index;
struct cgraph_node *node;
- ipa_reference_optimization_summary_t info;
int v_count;
lto_symtab_encoder_t encoder;
index = streamer_read_uhwi (ib);
encoder = file_data->symtab_node_encoder;
- node = cgraph (lto_symtab_encoder_deref (encoder, index));
- info = XCNEW (struct ipa_reference_optimization_summary_d);
- set_reference_optimization_summary (node, info);
- info->statics_not_read = BITMAP_ALLOC (&optimization_summary_obstack);
- info->statics_not_written = BITMAP_ALLOC (&optimization_summary_obstack);
+ node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref
+ (encoder, index));
+
+ ipa_reference_optimization_summary_d *info
+ = ipa_ref_opt_sum_summaries->get_create (node);
+
if (dump_file)
fprintf (dump_file,
- "\nFunction name:%s/%i:\n static not read:",
- cgraph_node_asm_name (node), node->symbol.order);
+ "\nFunction name:%s:\n static read:",
+ node->dump_asm_name ());
- /* Set the statics not read. */
+ /* Set the statics read. */
v_count = streamer_read_hwi (ib);
if (v_count == -1)
{
- info->statics_not_read = all_module_statics;
+ info->statics_read = all_module_statics;
if (dump_file)
fprintf (dump_file, " all module statics");
}
+ else if (v_count == 0)
+ info->statics_read = no_module_statics;
else
- for (j = 0; j < (unsigned int)v_count; j++)
- {
- unsigned int var_index = streamer_read_uhwi (ib);
- tree v_decl = lto_file_decl_data_get_var_decl (file_data,
- var_index);
- bitmap_set_bit (info->statics_not_read, DECL_UID (v_decl));
- if (dump_file)
- fprintf (dump_file, " %s", fndecl_name (v_decl));
- }
+ {
+ info->statics_read = BITMAP_ALLOC
+ (&optimization_summary_obstack);
+ for (j = 0; j < (unsigned int)v_count; j++)
+ {
+ unsigned int var_index = streamer_read_uhwi (ib);
+ tree v_decl = lto_file_decl_data_get_var_decl (file_data,
+ var_index);
+ bitmap_set_bit (info->statics_read,
+ ipa_reference_var_uid (v_decl));
+ if (dump_file)
+ fprintf (dump_file, " %s", fndecl_name (v_decl));
+ }
+ }
if (dump_file)
fprintf (dump_file,
- "\n static not written:");
- /* Set the statics not written. */
+ "\n static written:");
+ /* Set the statics written. */
v_count = streamer_read_hwi (ib);
if (v_count == -1)
{
- info->statics_not_written = all_module_statics;
+ info->statics_written = all_module_statics;
if (dump_file)
fprintf (dump_file, " all module statics");
}
+ else if (v_count == 0)
+ info->statics_written = no_module_statics;
else
- for (j = 0; j < (unsigned int)v_count; j++)
- {
- unsigned int var_index = streamer_read_uhwi (ib);
- tree v_decl = lto_file_decl_data_get_var_decl (file_data,
- var_index);
- bitmap_set_bit (info->statics_not_written, DECL_UID (v_decl));
- if (dump_file)
- fprintf (dump_file, " %s", fndecl_name (v_decl));
- }
+ {
+ info->statics_written = BITMAP_ALLOC
+ (&optimization_summary_obstack);
+ for (j = 0; j < (unsigned int)v_count; j++)
+ {
+ unsigned int var_index = streamer_read_uhwi (ib);
+ tree v_decl = lto_file_decl_data_get_var_decl (file_data,
+ var_index);
+ bitmap_set_bit (info->statics_written,
+ ipa_reference_var_uid (v_decl));
+ if (dump_file)
+ fprintf (dump_file, " %s", fndecl_name (v_decl));
+ }
+ }
if (dump_file)
fprintf (dump_file, "\n");
}
ib, data, len);
}
else
- /* Fatal error here. We do not want to support compiling ltrans units with
- different version of compiler or different flags than the WPA unit, so
- this should never happen. */
- fatal_error ("ipa reference summary is missing in ltrans unit");
+ /* Fatal error here. We do not want to support compiling ltrans units
+ with different version of compiler or different flags than
+ the WPA unit, so this should never happen. */
+ fatal_error (input_location,
+ "ipa reference summary is missing in ltrans unit");
}
}
-static bool
-gate_reference (void)
-{
- return (flag_ipa_reference
- /* Don't bother doing anything if the program has errors. */
- && !seen_error ());
-}
-
namespace {
const pass_data pass_data_ipa_reference =
IPA_PASS, /* type */
"static-var", /* name */
OPTGROUP_NONE, /* optinfo_flags */
- true, /* has_gate */
- true, /* has_execute */
TV_IPA_REFERENCE, /* tv_id */
0, /* properties_required */
0, /* properties_provided */
{}
/* opt_pass methods: */
- bool gate () { return gate_reference (); }
- unsigned int execute () { return propagate (); }
+ virtual bool gate (function *)
+ {
+ return ((in_lto_p || flag_ipa_reference)
+ /* Don't bother doing anything if the program has errors. */
+ && !seen_error ());
+ }
+
+ virtual unsigned int execute (function *) { return propagate (); }
}; // class pass_ipa_reference
{
return new pass_ipa_reference (ctxt);
}
+
+/* Reset all state within ipa-reference.c so that we can rerun the compiler
+ within the same process. For use by toplev::finalize. */
+
+void
+ipa_reference_c_finalize (void)
+{
+  /* The optimization summaries, the decl->UID map and the varpool removal
+     hook are created together at stream-in/init time, so the presence of
+     the summary table implies the other two must be torn down as well.  */
+  if (ipa_ref_opt_sum_summaries != NULL)
+    {
+      delete ipa_ref_opt_sum_summaries;
+      ipa_ref_opt_sum_summaries = NULL;
+      delete ipa_reference_vars_map;
+      ipa_reference_vars_map = NULL;
+      symtab->remove_varpool_removal_hook (varpool_node_hooks);
+    }
+
+  /* Release the bitmap obstack only if analysis ever initialized it;
+     ipa_init_p guards against releasing an uninitialized obstack.  */
+  if (ipa_init_p)
+    {
+      bitmap_obstack_release (&optimization_summary_obstack);
+      ipa_init_p = false;
+    }
+}