/* Finding reachable regions and values.
   Copyright (C) 2020-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "bitmap.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "options.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/region-model-reachability.h"
#include "diagnostic.h"
#include "tree-diagnostic.h"

#if ENABLE_ANALYZER

namespace ana {

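/* reachable_regions's ctor.  */
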
reachable_regions::reachable_regions (region_model *model)
: m_model (model), m_store (model->get_store ()),
  m_reachable_base_regs (), m_mutable_base_regs ()
{
}

/* Callback called for each cluster when initializing this object.  */

void
reachable_regions::init_cluster_cb (const region *base_reg,
                                    reachable_regions *this_ptr)
{
  this_ptr->init_cluster (base_reg);
}

/* Called for each cluster when initializing this object.  */
void
reachable_regions::init_cluster (const region *base_reg)
{
  /* Mark any globals as mutable (and traverse what they point to).  */
  const region *parent = base_reg->get_parent_region ();
  gcc_assert (parent);
  if (parent->get_kind () == RK_GLOBALS)
    add (base_reg, true);

  /* Mark any clusters that already escaped in previous unknown calls
     as mutable (and traverse what they currently point to).  */
  if (m_store->escaped_p (base_reg))
    add (base_reg, true);

  if (const symbolic_region *sym_reg = base_reg->dyn_cast_symbolic_region ())
    {
      const svalue *ptr = sym_reg->get_pointer ();
      if (ptr->implicitly_live_p (NULL, m_model))
        add (base_reg, true);
      switch (ptr->get_kind ())
        {
        default:
          break;
        case SK_INITIAL:
          {
            /* If BASE_REG is *INIT_VAL(REG) for some other REG, see if REG is
               unbound and untouched.  If so, then add BASE_REG as a root.  */
            const initial_svalue *init_sval
              = as_a <const initial_svalue *> (ptr);
            const region *init_sval_reg = init_sval->get_region ();
            const region *other_base_reg = init_sval_reg->get_base_region ();
            const binding_cluster *other_cluster
              = m_store->get_cluster (other_base_reg);
            if (other_cluster == NULL
                || !other_cluster->touched_p ())
              add (base_reg, true);
          }
          break;

        case SK_UNKNOWN:
        case SK_CONJURED:
          {
            /* If this cluster is due to dereferencing an unknown/conjured
               pointer, any values written through the pointer could still
               be live.  */
            add (base_reg, true);
          }
          break;
        }
    }
}

/* Lazily mark the cluster containing REG as being reachable, recursively
   adding clusters reachable from REG's cluster.  */
void
reachable_regions::add (const region *reg, bool is_mutable)
{
  gcc_assert (reg);

  const region *base_reg = const_cast <region *> (reg->get_base_region ());
  gcc_assert (base_reg);

  /* Bail out if this cluster is already in the sets at the IS_MUTABLE
     level of mutability.  */
  if (!is_mutable && m_reachable_base_regs.contains (base_reg))
    return;
  m_reachable_base_regs.add (base_reg);

  if (is_mutable)
    {
      if (m_mutable_base_regs.contains (base_reg))
        return;
      else
        m_mutable_base_regs.add (base_reg);
    }

  /* Add values within the cluster.  If any are pointers, add the pointee.  */
  if (binding_cluster *bind_cluster = m_store->get_cluster (base_reg))
    bind_cluster->for_each_value (handle_sval_cb, this);
  else
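    /* No binding cluster for BASE_REG yet: process the value the model
       currently gives for REG, so that anything reachable through it is
       still visited.  */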
    handle_sval (m_model->get_store_value (reg, NULL));
}

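/* Callback called for each value within a cluster.  */
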
void
reachable_regions::handle_sval_cb (const svalue *sval,
                                   reachable_regions *this_ptr)
{
  this_ptr->handle_sval (sval);
}

/* Add SVAL.  If it is a pointer, add the pointed-to region.  */

void
reachable_regions::handle_sval (const svalue *sval)
{
  m_reachable_svals.add (sval);
  m_mutable_svals.add (sval);
  if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
    {
      const region *pointee = ptr->get_pointee ();
      /* Use const-ness of pointer type to affect mutability.  */
      bool ptr_is_mutable = true;
      if (ptr->get_type ()
          && TREE_CODE (ptr->get_type ()) == POINTER_TYPE
          && TYPE_READONLY (TREE_TYPE (ptr->get_type ())))
        {
          ptr_is_mutable = false;
        }
      else
        {
          m_mutable_svals.add (sval);
        }
      add (pointee, ptr_is_mutable);
    }
  /* Treat all svalues within a compound_svalue as reachable.  */
  if (const compound_svalue *compound_sval
        = sval->dyn_cast_compound_svalue ())
    {
      for (compound_svalue::iterator_t iter = compound_sval->begin ();
           iter != compound_sval->end (); ++iter)
        {
          const svalue *iter_sval = (*iter).second;
          handle_sval (iter_sval);
        }
    }
  if (const svalue *cast = sval->maybe_undo_cast ())
    handle_sval (cast);

  /* If SVAL is the result of a reversible operation, then the operands
     are reachable.  */
  switch (sval->get_kind ())
    {
    default:
      break;
    case SK_UNARYOP:
      {
        const unaryop_svalue *unaryop_sval = (const unaryop_svalue *)sval;
        switch (unaryop_sval->get_op ())
          {
          default:
            break;
          case NEGATE_EXPR:
            handle_sval (unaryop_sval->get_arg ());
            break;
          }
      }
      break;
    case SK_BINOP:
      {
        const binop_svalue *binop_sval = (const binop_svalue *)sval;
        switch (binop_sval->get_op ())
          {
          default:
            break;
          case POINTER_PLUS_EXPR:
            handle_sval (binop_sval->get_arg0 ());
            handle_sval (binop_sval->get_arg1 ());
            break;
          }
      }
    }
}

/* Add SVAL.  If it is a pointer, add the pointed-to region.
   Use PARAM_TYPE for determining mutability.  */

void
reachable_regions::handle_parm (const svalue *sval, tree param_type)
{
  bool is_mutable = true;
  if (param_type
      && TREE_CODE (param_type) == POINTER_TYPE
      && TYPE_READONLY (TREE_TYPE (param_type)))
    is_mutable = false;
  if (is_mutable)
    m_mutable_svals.add (sval);
  else
    m_reachable_svals.add (sval);
  if (const region *base_reg = sval->maybe_get_deref_base_region ())
    add (base_reg, is_mutable);
  /* Treat all svalues within a compound_svalue as reachable.  */
  if (const compound_svalue *compound_sval
        = sval->dyn_cast_compound_svalue ())
    {
      for (compound_svalue::iterator_t iter = compound_sval->begin ();
           iter != compound_sval->end (); ++iter)
        {
          const svalue *iter_sval = (*iter).second;
          handle_sval (iter_sval);
        }
    }
  if (const svalue *cast = sval->maybe_undo_cast ())
    handle_sval (cast);
}

/* Update the store to mark the clusters that were found to be mutable
   as having escaped.
   Notify CTXT about escaping function_decls.  */

void
reachable_regions::mark_escaped_clusters (region_model_context *ctxt)
{
  auto_vec<const function_region *> escaped_fn_regs
    (m_mutable_base_regs.elements ());
  for (hash_set<const region *>::iterator iter = m_mutable_base_regs.begin ();
       iter != m_mutable_base_regs.end (); ++iter)
    {
      const region *base_reg = *iter;
      m_store->mark_as_escaped (base_reg);

      /* If we have a function that's escaped, potentially add
         it to the worklist.  */
      if (const function_region *fn_reg = base_reg->dyn_cast_function_region ())
        escaped_fn_regs.quick_push (fn_reg);
    }
  if (ctxt)
    {
      /* Sort to ensure deterministic results.  */
      escaped_fn_regs.qsort (region::cmp_ptr_ptr);
      unsigned i;
      const function_region *fn_reg;
      FOR_EACH_VEC_ELT (escaped_fn_regs, i, fn_reg)
        ctxt->on_escaped_function (fn_reg->get_fndecl ());
    }
}

/* Dump SET to PP, sorting it to avoid churn when comparing dumps.  */

template <typename T>
static void
dump_set (const hash_set<const T *> &set, pretty_printer *pp)
{
  auto_vec<const T *> elements (set.elements ());
  for (typename hash_set<const T *>::iterator iter = set.begin ();
       iter != set.end (); ++iter)
    elements.quick_push (*iter);

  elements.qsort (T::cmp_ptr_ptr);

  unsigned i;
  const T *element;
  FOR_EACH_VEC_ELT (elements, i, element)
    {
      pp_string (pp, " ");
      element->dump_to_pp (pp, true);
      pp_newline (pp);
    }
}

/* Dump a multiline representation of this object to PP.  */

void
reachable_regions::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "reachable clusters: ");
  pp_newline (pp);
  dump_set (m_reachable_base_regs, pp);

  pp_string (pp, "mutable clusters: ");
  pp_newline (pp);
  dump_set (m_mutable_base_regs, pp);

  pp_string (pp, "reachable svals: ");
  pp_newline (pp);
  dump_set (m_reachable_svals, pp);

  pp_string (pp, "mutable svals: ");
  pp_newline (pp);
  dump_set (m_mutable_svals, pp);
}

/* Dump a multiline representation of this object to stderr.  */

DEBUG_FUNCTION void
reachable_regions::dump () const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp);
  pp_flush (&pp);
}

} // namespace ana

#endif /* #if ENABLE_ANALYZER */