/* Finding reachable regions and values.
   Copyright (C) 2020-2021 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 | #include "config.h" | |
22 | #include "system.h" | |
23 | #include "coretypes.h" | |
24 | #include "tree.h" | |
25 | #include "function.h" | |
26 | #include "basic-block.h" | |
27 | #include "gimple.h" | |
28 | #include "gimple-iterator.h" | |
29 | #include "diagnostic-core.h" | |
30 | #include "graphviz.h" | |
31 | #include "options.h" | |
32 | #include "cgraph.h" | |
33 | #include "tree-dfa.h" | |
34 | #include "stringpool.h" | |
35 | #include "convert.h" | |
36 | #include "target.h" | |
37 | #include "fold-const.h" | |
38 | #include "tree-pretty-print.h" | |
39 | #include "tristate.h" | |
40 | #include "bitmap.h" | |
41 | #include "selftest.h" | |
42 | #include "function.h" | |
43 | #include "analyzer/analyzer.h" | |
44 | #include "analyzer/analyzer-logging.h" | |
45 | #include "ordered-hash-map.h" | |
46 | #include "options.h" | |
47 | #include "cgraph.h" | |
48 | #include "cfg.h" | |
49 | #include "digraph.h" | |
809192e7 | 50 | #include "json.h" |
808f4dfe DM |
51 | #include "analyzer/call-string.h" |
52 | #include "analyzer/program-point.h" | |
53 | #include "analyzer/store.h" | |
54 | #include "analyzer/region-model.h" | |
55 | #include "analyzer/region-model-reachability.h" | |
56 | ||
57 | #if ENABLE_ANALYZER | |
58 | ||
59 | namespace ana { | |
60 | ||
/* reachable_regions's ctor: construct an empty set of reachable/mutable
   regions for MODEL, caching MODEL's store.
   The sval sets (m_reachable_svals, m_mutable_svals) are default-constructed
   empty.  */

reachable_regions::reachable_regions (region_model *model)
: m_model (model), m_store (model->get_store ()),
  m_reachable_base_regs (), m_mutable_base_regs ()
{
}
66 | ||
/* Callback called for each cluster when initializing this object:
   simply forwards BASE_REG to THIS_PTR->init_cluster.  */

void
reachable_regions::init_cluster_cb (const region *base_reg,
				    reachable_regions *this_ptr)
{
  this_ptr->init_cluster (base_reg);
}
75 | ||
76 | /* Called for each cluster when initializing this object. */ | |
77 | void | |
78 | reachable_regions::init_cluster (const region *base_reg) | |
79 | { | |
80 | /* Mark any globals as mutable (and traverse what they point to). */ | |
81 | const region *parent = base_reg->get_parent_region (); | |
82 | gcc_assert (parent); | |
83 | if (parent->get_kind () == RK_GLOBALS) | |
84 | add (base_reg, true); | |
85 | ||
86 | /* Mark any clusters that already escaped in previous unknown calls | |
87 | as mutable (and traverse what they currently point to). */ | |
88 | if (m_store->escaped_p (base_reg)) | |
89 | add (base_reg, true); | |
90 | ||
91 | /* If BASE_REG is *INIT_VAL(REG) for some other REG, see if REG is | |
92 | unbound and untouched. If so, then add BASE_REG as a root. */ | |
93 | if (const symbolic_region *sym_reg = base_reg->dyn_cast_symbolic_region ()) | |
94 | { | |
95 | const svalue *ptr = sym_reg->get_pointer (); | |
96 | if (const initial_svalue *init_sval = ptr->dyn_cast_initial_svalue ()) | |
97 | { | |
98 | const region *init_sval_reg = init_sval->get_region (); | |
99 | const region *other_base_reg = init_sval_reg->get_base_region (); | |
100 | const binding_cluster *other_cluster | |
101 | = m_store->get_cluster (other_base_reg); | |
102 | if (other_cluster == NULL | |
103 | || !other_cluster->touched_p ()) | |
104 | add (base_reg, true); | |
105 | } | |
106 | } | |
107 | } | |
108 | ||
109 | /* Lazily mark the cluster containing REG as being reachable, recursively | |
110 | adding clusters reachable from REG's cluster. */ | |
111 | void | |
112 | reachable_regions::add (const region *reg, bool is_mutable) | |
113 | { | |
114 | gcc_assert (reg); | |
115 | ||
116 | const region *base_reg = const_cast <region *> (reg->get_base_region ()); | |
117 | gcc_assert (base_reg); | |
118 | ||
119 | /* Bail out if this cluster is already in the sets at the IS_MUTABLE | |
120 | level of mutability. */ | |
121 | if (!is_mutable && m_reachable_base_regs.contains (base_reg)) | |
122 | return; | |
123 | m_reachable_base_regs.add (base_reg); | |
124 | ||
125 | if (is_mutable) | |
126 | { | |
127 | if (m_mutable_base_regs.contains (base_reg)) | |
128 | return; | |
129 | else | |
130 | m_mutable_base_regs.add (base_reg); | |
131 | } | |
132 | ||
133 | /* Add values within the cluster. If any are pointers, add the pointee. */ | |
134 | if (binding_cluster *bind_cluster = m_store->get_cluster (base_reg)) | |
135 | bind_cluster->for_each_value (handle_sval_cb, this); | |
136 | else | |
af66094d | 137 | handle_sval (m_model->get_store_value (reg)); |
808f4dfe DM |
138 | } |
139 | ||
/* Callback for binding_cluster::for_each_value: forward SVAL to
   THIS_PTR->handle_sval.  */

void
reachable_regions::handle_sval_cb (const svalue *sval,
				   reachable_regions *this_ptr)
{
  this_ptr->handle_sval (sval);
}
146 | ||
147 | /* Add SVAL. If it is a pointer, add the pointed-to region. */ | |
148 | ||
149 | void | |
150 | reachable_regions::handle_sval (const svalue *sval) | |
151 | { | |
152 | m_reachable_svals.add (sval); | |
153 | if (const region_svalue *ptr = sval->dyn_cast_region_svalue ()) | |
154 | { | |
155 | const region *pointee = ptr->get_pointee (); | |
156 | /* Use const-ness of pointer type to affect mutability. */ | |
157 | bool ptr_is_mutable = true; | |
158 | if (ptr->get_type () | |
159 | && TREE_CODE (ptr->get_type ()) == POINTER_TYPE | |
160 | && TYPE_READONLY (TREE_TYPE (ptr->get_type ()))) | |
161 | { | |
162 | ptr_is_mutable = false; | |
163 | } | |
164 | else | |
165 | { | |
166 | m_mutable_svals.add (sval); | |
167 | } | |
168 | add (pointee, ptr_is_mutable); | |
169 | } | |
170 | /* Treat all svalues within a compound_svalue as reachable. */ | |
171 | if (const compound_svalue *compound_sval | |
172 | = sval->dyn_cast_compound_svalue ()) | |
173 | { | |
174 | for (compound_svalue::iterator_t iter = compound_sval->begin (); | |
175 | iter != compound_sval->end (); ++iter) | |
176 | { | |
177 | const svalue *iter_sval = (*iter).second; | |
178 | handle_sval (iter_sval); | |
179 | } | |
180 | } | |
181 | if (const svalue *cast = sval->maybe_undo_cast ()) | |
182 | handle_sval (cast); | |
1a9af271 DM |
183 | |
184 | /* If SVAL is the result of a reversible operation, then the operands | |
185 | are reachable. */ | |
186 | switch (sval->get_kind ()) | |
187 | { | |
188 | default: | |
189 | break; | |
190 | case SK_UNARYOP: | |
191 | { | |
192 | const unaryop_svalue *unaryop_sval = (const unaryop_svalue *)sval; | |
193 | switch (unaryop_sval->get_op ()) | |
194 | { | |
195 | default: | |
196 | break; | |
197 | case NEGATE_EXPR: | |
198 | handle_sval (unaryop_sval->get_arg ()); | |
199 | break; | |
200 | } | |
201 | } | |
202 | break; | |
203 | case SK_BINOP: | |
204 | { | |
205 | const binop_svalue *binop_sval = (const binop_svalue *)sval; | |
206 | switch (binop_sval->get_op ()) | |
207 | { | |
208 | default: | |
209 | break; | |
210 | case POINTER_PLUS_EXPR: | |
211 | handle_sval (binop_sval->get_arg0 ()); | |
212 | handle_sval (binop_sval->get_arg1 ()); | |
213 | break; | |
214 | } | |
215 | } | |
216 | } | |
808f4dfe DM |
217 | } |
218 | ||
219 | /* Add SVAL. If it is a pointer, add the pointed-to region. | |
220 | Use PARAM_TYPE for determining mutability. */ | |
221 | ||
222 | void | |
223 | reachable_regions::handle_parm (const svalue *sval, tree param_type) | |
224 | { | |
225 | bool is_mutable = true; | |
226 | if (param_type | |
227 | && TREE_CODE (param_type) == POINTER_TYPE | |
228 | && TYPE_READONLY (TREE_TYPE (param_type))) | |
229 | is_mutable = false; | |
230 | if (is_mutable) | |
231 | m_mutable_svals.add (sval); | |
232 | else | |
233 | m_reachable_svals.add (sval); | |
234 | if (const region_svalue *parm_ptr | |
235 | = sval->dyn_cast_region_svalue ()) | |
236 | { | |
237 | const region *pointee_reg = parm_ptr->get_pointee (); | |
238 | add (pointee_reg, is_mutable); | |
239 | } | |
240 | } | |
241 | ||
/* Update the store to mark the clusters that were found to be mutable
   as having escaped.
   Notify CTXT about escaping function_decls.  */

void
reachable_regions::mark_escaped_clusters (region_model_context *ctxt)
{
  gcc_assert (ctxt);
  /* Reserve capacity up front so quick_push below cannot overflow.  */
  auto_vec<const function_region *> escaped_fn_regs
    (m_mutable_base_regs.elements ());
  for (hash_set<const region *>::iterator iter = m_mutable_base_regs.begin ();
       iter != m_mutable_base_regs.end (); ++iter)
    {
      const region *base_reg = *iter;
      m_store->mark_as_escaped (base_reg);

      /* If we have a function that's escaped, potentially add
	 it to the worklist.  */
      if (const function_region *fn_reg = base_reg->dyn_cast_function_region ())
	escaped_fn_regs.quick_push (fn_reg);
    }
  /* Sort to ensure deterministic results (hash_set iteration order is
     not deterministic).  */
  escaped_fn_regs.qsort (region::cmp_ptr_ptr);
  unsigned i;
  const function_region *fn_reg;
  FOR_EACH_VEC_ELT (escaped_fn_regs, i, fn_reg)
    ctxt->on_escaped_function (fn_reg->get_fndecl ());
}
270 | ||
b0702ac5 DM |
271 | /* Dump SET to PP, sorting it to avoid churn when comparing dumps. */ |
272 | ||
273 | template <typename T> | |
274 | static void | |
275 | dump_set (const hash_set<const T *> &set, pretty_printer *pp) | |
276 | { | |
277 | auto_vec<const T *> elements (set.elements ()); | |
278 | for (typename hash_set<const T *>::iterator iter = set.begin (); | |
279 | iter != set.end (); ++iter) | |
280 | elements.quick_push (*iter); | |
281 | ||
282 | elements.qsort (T::cmp_ptr_ptr); | |
283 | ||
284 | unsigned i; | |
285 | const T *element; | |
286 | FOR_EACH_VEC_ELT (elements, i, element) | |
287 | { | |
288 | pp_string (pp, " "); | |
289 | element->dump_to_pp (pp, true); | |
290 | pp_newline (pp); | |
291 | } | |
292 | } | |
293 | ||
/* Dump a multiline representation of this object to PP: each of the
   four sets in turn, under a title line.  Each set is emitted sorted
   (via dump_set) so dumps are stable across runs.  */

void
reachable_regions::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "reachable clusters: ");
  pp_newline (pp);
  dump_set (m_reachable_base_regs, pp);

  pp_string (pp, "mutable clusters: ");
  pp_newline (pp);
  dump_set (m_mutable_base_regs, pp);

  pp_string (pp, "reachable svals: ");
  pp_newline (pp);
  dump_set (m_reachable_svals, pp);

  pp_string (pp, "mutable svals: ");
  pp_newline (pp);
  dump_set (m_mutable_svals, pp);
}
315 | ||
316 | /* Dump a multiline representation of this object to stderr. */ | |
317 | ||
318 | DEBUG_FUNCTION void | |
319 | reachable_regions::dump () const | |
320 | { | |
321 | pretty_printer pp; | |
322 | pp_format_decoder (&pp) = default_tree_printer; | |
323 | pp_show_color (&pp) = pp_show_color (global_dc->printer); | |
324 | pp.buffer->stream = stderr; | |
325 | dump_to_pp (&pp); | |
326 | pp_flush (&pp); | |
327 | } | |
328 | ||
329 | } // namespace ana | |
330 | ||
331 | #endif /* #if ENABLE_ANALYZER */ |