smap->on_unknown_change (sval, is_mutable, m_ext_state);
}
+void
+impl_region_model_context::on_escaped_function (tree fndecl)
+{
+ m_eg->on_escaped_function (fndecl);
+}
+
/* class setjmp_svalue : public svalue. */
/* Implementation of svalue::accept vfunc for setjmp_svalue. */
exploded_node *
exploded_graph::add_function_entry (function *fun)
{
+ /* Be idempotent. */
+ if (m_functions_with_enodes.contains (fun))
+ {
+ logger * const logger = get_logger ();
+ if (logger)
+ logger->log ("entrypoint for %qE already exists", fun->decl);
+ return NULL;
+ }
+
program_point point = program_point::from_function_entry (m_sg, fun);
program_state state (m_ext_state);
state.push_frame (m_ext_state, fun);
/* We should never fail to add such a node. */
gcc_assert (enode);
add_edge (m_origin, enode, NULL);
+
+ m_functions_with_enodes.add (fun);
+
return enode;
}
return true;
}
+/* Callback for walk_tree for finding callbacks within initializers;
+ ensure they are treated as possible entrypoints to the analysis. */
+
+static tree
+add_any_callbacks (tree *tp, int *, void *data)
+{
+ exploded_graph *eg = (exploded_graph *)data;
+ if (TREE_CODE (*tp) == FUNCTION_DECL)
+ eg->on_escaped_function (*tp);
+ return NULL_TREE;
+}
+
/* Add initial nodes to EG, with entrypoints for externally-callable
functions. */
logger->log ("did not create enode for %qE entrypoint", fun->decl);
}
}
+
+ /* Find callbacks that are reachable from global initializers. */
+ varpool_node *vpnode;
+ FOR_EACH_VARIABLE (vpnode)
+ {
+ tree decl = vpnode->decl;
+ if (!TREE_PUBLIC (decl))
+ continue;
+ tree init = DECL_INITIAL (decl);
+ if (!init)
+ continue;
+ walk_tree (&init, add_any_callbacks, this, NULL);
+ }
}
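
Reviewer note (illustrative sketch, not part of the patch): the kind of translation-unit code the initializer walk above is aimed at.  The names "hooks", "on_event", and "global_hooks" below are hypothetical.  The DECL_INITIAL of the public global is a CONSTRUCTOR whose values include the FUNCTION_DECL for the static callback, so add_any_callbacks finds it and exploded_graph::on_escaped_function adds an analysis entrypoint for it; the new testcases at the end of the patch exercise this for real.

struct hooks
{
  void (*handler) (void *);
};

static void
on_event (void *p)
{
  /* With this patch, the analyzer explores this body even though nothing
     in this TU calls on_event directly.  */
}

/* Public global: its DECL_INITIAL contains the FUNCTION_DECL for on_event,
   which the FOR_EACH_VARIABLE/walk_tree pass above finds.  */
const struct hooks global_hooks = { .handler = on_event };
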
/* The main loop of the analysis.
return enode;
}
+/* Ensure that there is an exploded_node for a top-level call to FNDECL. */
+
+void
+exploded_graph::on_escaped_function (tree fndecl)
+{
+ logger * const logger = get_logger ();
+ LOG_FUNC_1 (logger, "%qE", fndecl);
+
+ cgraph_node *cgnode = cgraph_node::get (fndecl);
+ if (!cgnode)
+ return;
+
+ function *fun = cgnode->get_fun ();
+ if (!fun)
+ return;
+
+ exploded_node *enode = add_function_entry (fun);
+ if (logger)
+ {
+ if (enode)
+ logger->log ("created EN %i for %qE entrypoint",
+ enode->m_index, fun->decl);
+ else
+ logger->log ("did not create enode for %qE entrypoint", fun->decl);
+ }
+}
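
Reviewer note (illustrative sketch, not part of the patch): for a callback that escapes through a call, the chain that reaches this function is all visible in this patch: region_model::handle_unrecognized_call builds a reachable_regions, mark_escaped_clusters notices the function_region and calls region_model_context::on_escaped_function, impl_region_model_context forwards that to the exploded_graph, and add_function_entry creates the extra entrypoint.  User code of roughly this shape (hypothetical names "register_cb", "cb", "setup") triggers it:

extern void register_cb (void (*callback) (void *));

static void
cb (void *p)
{
  /* Once cb escapes via the call in setup below, the hooks above add an
     entrypoint for it, so this body gets analyzed on its own.  */
}

void
setup (void)
{
  register_cb (cb);  /* Unknown call: the function_region for cb escapes.  */
}
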
+
/* A collection of classes for visualizing the callgraph in .dot form
(as represented in the supergraph). */
void on_unexpected_tree_code (tree t,
const dump_location_t &loc) FINAL OVERRIDE;
+ void on_escaped_function (tree fndecl) FINAL OVERRIDE;
+
exploded_graph *m_eg;
log_user m_logger;
const exploded_node *m_enode_for_diag;
return m_worklist.get_scc_id (node);
}
+ void on_escaped_function (tree fndecl);
+
private:
void print_bar_charts (pretty_printer *pp) const;
call_string_data_map_t m_per_call_string_data;
auto_vec<int> m_PK_AFTER_SUPERNODE_per_snode;
+
+ /* Functions with a top-level enode, to make add_function_entry
+ be idempotent, for use in handling callbacks. */
+ hash_set<function *> m_functions_with_enodes;
};
/* A path within an exploded_graph: a sequence of edges. */
namespace ana {
-reachable_regions::reachable_regions (store *store,
+reachable_regions::reachable_regions (region_model *model,
region_model_manager *mgr)
-: m_store (store), m_mgr (mgr),
+: m_model (model), m_store (model->get_store ()), m_mgr (mgr),
m_reachable_base_regs (), m_mutable_base_regs ()
{
}
if (binding_cluster *bind_cluster = m_store->get_cluster (base_reg))
bind_cluster->for_each_value (handle_sval_cb, this);
else
- handle_sval (m_mgr->get_or_create_initial_value (base_reg));
+ handle_sval (m_model->get_store_value (reg));
}
void
}
}
-/* Update m_store to mark the clusters that were found to be mutable
- as having escaped. */
+/* Update the store to mark the clusters that were found to be mutable
+ as having escaped.
+ Notify CTXT about escaping function_decls. */
void
-reachable_regions::mark_escaped_clusters ()
+reachable_regions::mark_escaped_clusters (region_model_context *ctxt)
{
+ gcc_assert (ctxt);
for (hash_set<const region *>::iterator iter = m_mutable_base_regs.begin ();
iter != m_mutable_base_regs.end (); ++iter)
{
const region *base_reg = *iter;
m_store->mark_as_escaped (base_reg);
+
+ /* If we have a function that's escaped, potentially add
+ it to the worklist. */
+ if (const function_region *fn_reg = base_reg->dyn_cast_function_region ())
+ ctxt->on_escaped_function (fn_reg->get_fndecl ());
}
}
class reachable_regions
{
public:
- reachable_regions (store *store, region_model_manager *mgr);
+ reachable_regions (region_model *model, region_model_manager *mgr);
/* Callback called for each cluster when initializing this object. */
static void init_cluster_cb (const region *base_reg,
void handle_parm (const svalue *sval, tree param_type);
/* Update the store to mark the clusters that were found to be mutable
- as having escaped. */
- void mark_escaped_clusters ();
+ as having escaped.
+ Notify CTXT about escaping function_decls. */
+ void mark_escaped_clusters (region_model_context *ctxt);
/* Iteration over reachable base regions. */
hash_set<const region *>::iterator begin ()
DEBUG_FUNCTION void dump () const;
private:
+ region_model *m_model;
store *m_store;
region_model_manager *m_mgr;
{
tree fndecl = get_fndecl_for_call (call, ctxt);
- reachable_regions reachable_regs (&m_store, m_mgr);
+ reachable_regions reachable_regs (this, m_mgr);
/* Determine the reachable regions and their mutability. */
{
}
/* Mark any clusters that have escaped. */
- reachable_regs.mark_escaped_clusters ();
+ reachable_regs.mark_escaped_clusters (ctxt);
/* Update bindings for all clusters that have escaped, whether above,
or previously. */
region_model::get_reachable_svalues (svalue_set *out,
const svalue *extra_sval)
{
- reachable_regions reachable_regs (&m_store, m_mgr);
+ reachable_regions reachable_regs (this, m_mgr);
/* Add globals and regions that already escaped in previous
unknown calls. */
an unknown value if an unknown call has occurred, unless this is
static to-this-TU and hasn't escaped. Globals that have escaped
are explicitly tracked, so we shouldn't hit this case for them. */
- if (m_store.called_unknown_fn_p () && TREE_PUBLIC (decl))
+ if (m_store.called_unknown_fn_p ()
+ && TREE_PUBLIC (decl)
+ && !TREE_READONLY (decl))
return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
/* If we are on a path from the entrypoint from "main" and we have a
global decl defined in this TU that hasn't been touched yet, then
the initial value of REG can be taken from the initialization value
of the decl. */
- if (called_from_main_p () && !DECL_EXTERNAL (decl))
+ if ((called_from_main_p () && !DECL_EXTERNAL (decl))
+ || TREE_READONLY (decl))
{
/* Get the initializer value for base_reg. */
const svalue *base_reg_init
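
Reviewer note (hypothetical example, not part of the patch): what the two TREE_READONLY tests above buy.  A const global is no longer treated as clobbered by a call to an unknown function, and its initializer value can be used even on paths that do not start at "main":

extern void unknown_fn (void);

const int limit = 42;  /* TREE_READONLY, with a DECL_INITIAL of 42.  */

int
get_limit (void)
{
  unknown_fn ();  /* No longer forces "limit" to an unknown value.  */
  return limit;   /* The TREE_READONLY branch lets the initializer (42)
                     be used here.  */
}
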
know how to handle the tree code of T at LOC. */
virtual void on_unexpected_tree_code (tree t,
const dump_location_t &loc) = 0;
+
+ /* Hook for clients to be notified when a function_decl escapes. */
+ virtual void on_escaped_function (tree fndecl) = 0;
};
/* A "do nothing" subclass of region_model_context. */
{
}
void on_unexpected_tree_code (tree, const dump_location_t &) OVERRIDE {}
+
+ void on_escaped_function (tree) OVERRIDE {}
};
/* A subclass of region_model_context for determining if operations fail
--- /dev/null
+/* Reproducer for PR analyzer/97258: we should report the double-free
+ inside a static callback if the callback escapes. */
+
+#include <stdlib.h>
+
+static void callback_1 (void *p)
+{
+ free (p);
+ free (p); /* { dg-warning "double-'free' of 'p'" } */
+}
+
+struct ops {
+ void (*cb) (void *);
+};
+
+static const struct ops ops_1 = {
+ .cb = callback_1
+};
+
+extern void registration (const void *);
+
+void register_1 (void)
+{
+ registration (&ops_1);
+}
--- /dev/null
+/* Reproducer for PR analyzer/97258: we should report the double-free
+ inside a static callback if the callback is accessible via a global
+ initializer. */
+
+#include <stdlib.h>
+
+static void callback_1 (void *p)
+{
+ free (p);
+ free (p); /* { dg-warning "double-'free' of 'p'" } */
+}
+
+struct ops {
+ void (*cb) (void *);
+};
+
+/* The callback struct is not static, and so could be accessed by
+ another TU. */
+
+const struct ops ops_1 = {
+ .cb = callback_1
+};
--- /dev/null
+#include "analyzer-decls.h"
+
+typedef __SIZE_TYPE__ size_t;
+typedef int (*__compar_fn_t)(const void *, const void *);
+extern void qsort(void *__base, size_t __nmemb, size_t __size,
+ __compar_fn_t __compar)
+ __attribute__((__nonnull__(1, 4)));
+
+static int
+test_1_callback (const void *p1, const void *p2)
+{
+ __analyzer_dump_path (); /* { dg-message "path" } */
+ return 0;
+}
+
+void test_1_caller (int *arr, size_t n)
+{
+ qsort (arr, n, sizeof (int), test_1_callback);
+}