]>
Commit | Line | Data |
---|---|---|
808f4dfe | 1 | /* Classes for modeling the state of memory. |
7adcbafe | 2 | Copyright (C) 2020-2022 Free Software Foundation, Inc. |
808f4dfe DM |
3 | Contributed by David Malcolm <dmalcolm@redhat.com>. |
4 | ||
5 | This file is part of GCC. | |
6 | ||
7 | GCC is free software; you can redistribute it and/or modify it | |
8 | under the terms of the GNU General Public License as published by | |
9 | the Free Software Foundation; either version 3, or (at your option) | |
10 | any later version. | |
11 | ||
12 | GCC is distributed in the hope that it will be useful, but | |
13 | WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
15 | General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
18 | along with GCC; see the file COPYING3. If not see | |
19 | <http://www.gnu.org/licenses/>. */ | |
20 | ||
21 | #ifndef GCC_ANALYZER_STORE_H | |
22 | #define GCC_ANALYZER_STORE_H | |
23 | ||
24 | /* Implementation of the region-based ternary model described in: | |
25 | "A Memory Model for Static Analysis of C Programs" | |
26 | (Zhongxing Xu, Ted Kremenek, and Jian Zhang) | |
27 | http://lcs.ios.ac.cn/~xuzb/canalyze/memmodel.pdf */ | |
28 | ||
29 | /* The store models memory as a collection of "clusters", where regions | |
30 | are partitioned into clusters via their base region. | |
31 | ||
32 | For example, given: | |
33 | int a, b, c; | |
34 | struct coord { double x; double y; } verts[3]; | |
35 | then "verts[0].y" and "verts[1].x" both have "verts" as their base region. | |
36 | Each of a, b, c, and verts will have their own clusters, so that we | |
   know that writes to e.g. "verts[1].x" don't affect e.g. "a".
38 | ||
39 | Within each cluster we store a map of bindings to values, where the | |
40 | binding keys can be either concrete or symbolic. | |
41 | ||
42 | Concrete bindings affect a specific range of bits relative to the start | |
43 | of the base region of the cluster, whereas symbolic bindings affect | |
44 | a specific subregion within the cluster. | |
45 | ||
46 | Consider (from the symbolic-1.c testcase): | |
47 | ||
48 | char arr[1024]; | |
49 | arr[2] = a; (1) | |
50 | arr[3] = b; (2) | |
51 | After (1) and (2), the cluster for "arr" has concrete bindings | |
52 | for bits 16-23 and for bits 24-31, with svalues "INIT_VAL(a)" | |
53 | and "INIT_VAL(b)" respectively: | |
54 | cluster: {bits 16-23: "INIT_VAL(a)", | |
55 | bits 24-31: "INIT_VAL(b)"; | |
56 | flags: {}} | |
57 | Attempting to query unbound subregions e.g. arr[4] will | |
58 | return "UNINITIALIZED". | |
59 | "a" and "b" are each in their own clusters, with no explicit | |
60 | bindings, and thus implicitly have value INIT_VAL(a) and INIT_VAL(b). | |
61 | ||
62 | arr[3] = c; (3) | |
63 | After (3), the concrete binding for bits 24-31 is replaced with the | |
64 | svalue "INIT_VAL(c)": | |
65 | cluster: {bits 16-23: "INIT_VAL(a)", (from before) | |
66 | bits 24-31: "INIT_VAL(c)"; (updated) | |
67 | flags: {}} | |
68 | ||
69 | arr[i] = d; (4) | |
70 | After (4), we lose the concrete bindings and replace them with a | |
71 | symbolic binding for "arr[i]", with svalue "INIT_VAL(d)". We also | |
72 | mark the cluster as having been "symbolically touched": future | |
73 | attempts to query the values of subregions other than "arr[i]", | |
74 | such as "arr[3]" are "UNKNOWN", since we don't know if the write | |
75 | to arr[i] affected them. | |
76 | cluster: {symbolic_key(arr[i]): "INIT_VAL(d)"; | |
77 | flags: {TOUCHED}} | |
78 | ||
79 | arr[j] = e; (5) | |
80 | After (5), we lose the symbolic binding for "arr[i]" since we could | |
81 | have overwritten it, and add a symbolic binding for "arr[j]". | |
       cluster: {symbolic_key(arr[j]): "INIT_VAL(e)";  (different symbolic
83 | flags: {TOUCHED}} binding) | |
84 | ||
85 | arr[3] = f; (6) | |
86 | After (6), we lose the symbolic binding for "arr[j]" since we could | |
87 | have overwritten it, and gain a concrete binding for bits 24-31 | |
     again, this time with svalue "INIT_VAL(f)":
       cluster: {bits 24-31: "INIT_VAL(f)";
90 | flags: {TOUCHED}} | |
91 | The cluster is still flagged as touched, so that we know that | |
92 | accesses to other elements are "UNKNOWN" rather than | |
93 | "UNINITIALIZED". | |
94 | ||
95 | Handling symbolic regions requires us to handle aliasing. | |
96 | ||
97 | In the first example above, each of a, b, c and verts are non-symbolic | |
98 | base regions and so their clusters are "concrete clusters", whereas given: | |
99 | struct coord *p, *q; | |
100 | then "*p" and "*q" are symbolic base regions, and thus "*p" and "*q" | |
101 | have "symbolic clusters". | |
102 | ||
103 | In the above, "verts[i].x" will have a symbolic *binding* within a | |
104 | concrete cluster for "verts", whereas "*p" is a symbolic *cluster*. | |
105 | ||
106 | Writes to concrete clusters can't affect other concrete clusters, | |
107 | but can affect symbolic clusters; e.g. after: | |
108 | verts[0].x = 42; | |
109 | we bind 42 in the cluster for "verts", but the clusters for "b" and "c" | |
110 | can't be affected. Any symbolic clusters for *p and for *q can be | |
   affected, since *p and *q could alias verts.
112 | ||
113 | Writes to a symbolic cluster can affect other clusters, both | |
114 | concrete and symbolic; e.g. after: | |
115 | p->x = 17; | |
116 | we bind 17 within the cluster for "*p". The concrete clusters for a, b, | |
117 | c, and verts could be affected, depending on whether *p aliases them. | |
118 | Similarly, the symbolic cluster to *q could be affected. */ | |
119 | ||
120 | namespace ana { | |
121 | ||
3a66c289 DM |
122 | /* A class for keeping track of aspects of a program_state that we don't |
123 | know about, to avoid false positives about leaks. | |
124 | ||
125 | Consider: | |
126 | ||
127 | p->field = malloc (1024); | |
128 | q->field = NULL; | |
129 | ||
130 | where we don't know whether or not p and q point to the same memory, | |
131 | and: | |
132 | ||
133 | p->field = malloc (1024); | |
134 | unknown_fn (p); | |
135 | ||
136 | In both cases, the svalue for the address of the allocated buffer | |
137 | goes from being bound to p->field to not having anything explicitly bound | |
138 | to it. | |
139 | ||
140 | Given that we conservatively discard bindings due to possible aliasing or | |
141 | calls to unknown function, the store loses references to svalues, | |
142 | but these svalues could still be live. We don't want to warn about | |
143 | them leaking - they're effectively in a "maybe live" state. | |
144 | ||
145 | This "maybe live" information is somewhat transient. | |
146 | ||
147 | We don't want to store this "maybe live" information in the program_state, | |
148 | region_model, or store, since we don't want to bloat these objects (and | |
149 | potentially bloat the exploded_graph with more nodes). | |
150 | However, we can't store it in the region_model_context, as these context | |
151 | objects sometimes don't last long enough to be around when comparing the | |
152 | old vs the new state. | |
153 | ||
154 | This class is a way to track a set of such svalues, so that we can | |
155 | temporarily capture that they are in a "maybe live" state whilst | |
156 | comparing old and new states. */ | |
157 | ||
class uncertainty_t
{
public:
  typedef hash_set<const svalue *>::iterator iterator;

  /* Record that SVAL might still be bound somewhere: a binding to it
     was conservatively dropped (e.g. due to possible aliasing).  */
  void on_maybe_bound_sval (const svalue *sval)
  {
    m_maybe_bound_svals.add (sval);
  }
  /* Record that SVAL has mutable sm-state that a call to an unknown
     function could have changed.  */
  void on_mutable_sval_at_unknown_call (const svalue *sval)
  {
    m_mutable_at_unknown_call_svals.add (sval);
  }

  /* Return true if we can't be sure of the sm-state of SVAL, i.e. if
     it has been recorded in either of the sets above.  */
  bool unknown_sm_state_p (const svalue *sval)
  {
    return (m_maybe_bound_svals.contains (sval)
	    || m_mutable_at_unknown_call_svals.contains (sval));
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const;
  void dump (bool simple) const;

  /* Iteration over the set of maybe-bound svalues.  */
  iterator begin_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.begin ();
  }
  iterator end_maybe_bound_svals () const
  {
    return m_maybe_bound_svals.end ();
  }

private:

  /* svalues that might or might not still be bound.  */
  hash_set<const svalue *> m_maybe_bound_svals;

  /* svalues that have mutable sm-state at unknown calls.  */
  hash_set<const svalue *> m_mutable_at_unknown_call_svals;
};
198 | ||
7c6b354b | 199 | class byte_range; |
808f4dfe | 200 | class concrete_binding; |
33255ad3 | 201 | class symbolic_binding; |
808f4dfe | 202 | |
808f4dfe DM |
203 | /* Abstract base class for describing ranges of bits within a binding_map |
204 | that can have svalues bound to them. */ | |
205 | ||
class binding_key
{
public:
  virtual ~binding_key () {}
  /* Return true if this is a concrete_binding (a specific range of bits),
     as opposed to a symbolic_binding (a region).  */
  virtual bool concrete_p () const = 0;
  bool symbolic_p () const { return !concrete_p (); }

  /* Get the (consolidated) binding_key for region R.  */
  static const binding_key *make (store_manager *mgr, const region *r);

  virtual void dump_to_pp (pretty_printer *pp, bool simple) const = 0;
  void dump (bool simple) const;
  label_text get_desc (bool simple=true) const;

  /* qsort-style comparators for imposing a deterministic order on keys.  */
  static int cmp_ptrs (const void *, const void *);
  static int cmp (const binding_key *, const binding_key *);

  /* Checked downcasts, returning NULL if this key is not of the
     requested subclass.  */
  virtual const concrete_binding *dyn_cast_concrete_binding () const
  { return NULL; }
  virtual const symbolic_binding *dyn_cast_symbolic_binding () const
  { return NULL; }
};
227 | ||
7c6b354b DM |
228 | /* A concrete range of bits. */ |
229 | ||
6b400aef DM |
struct bit_range
{
  bit_range (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_start_bit_offset (start_bit_offset),
    m_size_in_bits (size_in_bits)
  {}

  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  bit_offset_t get_start_bit_offset () const
  {
    return m_start_bit_offset;
  }
  /* Return the first bit offset beyond the end of this range.  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_start_bit_offset + m_size_in_bits;
  }
  /* Return the offset of the last bit within this range.  */
  bit_offset_t get_last_bit_offset () const
  {
    return get_next_bit_offset () - 1;
  }

  /* Return true if OFFSET lies within this range.  */
  bool contains_p (bit_offset_t offset) const
  {
    return (offset >= get_start_bit_offset ()
	    && offset < get_next_bit_offset ());
  }

  /* Containment test against another range; on success *OUT is
     written (see implementation in store.cc for its meaning).  */
  bool contains_p (const bit_range &other, bit_range *out) const;

  bool operator== (const bit_range &other) const
  {
    return (m_start_bit_offset == other.m_start_bit_offset
	    && m_size_in_bits == other.m_size_in_bits);
  }

  /* Return true if this range and OTHER share at least one bit.  */
  bool intersects_p (const bit_range &other) const
  {
    return (get_start_bit_offset () < other.get_next_bit_offset ()
	    && other.get_start_bit_offset () < get_next_bit_offset ());
  }
  bool intersects_p (const bit_range &other,
		     bit_range *out_this,
		     bit_range *out_other) const;

  static int cmp (const bit_range &br1, const bit_range &br2);

  bit_range operator- (bit_offset_t offset) const;

  static bool from_mask (unsigned HOST_WIDE_INT mask, bit_range *out);

  /* Attempt to express this range in whole bytes, writing to *OUT.  */
  bool as_byte_range (byte_range *out) const;

  bit_offset_t m_start_bit_offset;
  bit_size_t m_size_in_bits;
};
287 | ||
7c6b354b DM |
288 | /* A concrete range of bytes. */ |
289 | ||
struct byte_range
{
  byte_range (byte_offset_t start_byte_offset, byte_size_t size_in_bytes)
  : m_start_byte_offset (start_byte_offset),
    m_size_in_bytes (size_in_bytes)
  {}

  void dump_to_pp (pretty_printer *pp) const;
  void dump () const;

  /* Return true if OFFSET lies within this range.  */
  bool contains_p (byte_offset_t offset) const
  {
    return (offset >= get_start_byte_offset ()
	    && offset < get_next_byte_offset ());
  }
  bool contains_p (const byte_range &other, byte_range *out) const;

  bool operator== (const byte_range &other) const
  {
    return (m_start_byte_offset == other.m_start_byte_offset
	    && m_size_in_bytes == other.m_size_in_bytes);
  }

  byte_offset_t get_start_byte_offset () const
  {
    return m_start_byte_offset;
  }
  /* Return the first byte offset beyond the end of this range.  */
  byte_offset_t get_next_byte_offset () const
  {
    return m_start_byte_offset + m_size_in_bytes;
  }
  /* Return the offset of the last byte within this range.  */
  byte_offset_t get_last_byte_offset () const
  {
    return m_start_byte_offset + m_size_in_bytes - 1;
  }

  /* Return the equivalent range expressed in bits.  */
  bit_range as_bit_range () const
  {
    return bit_range (m_start_byte_offset * BITS_PER_UNIT,
		      m_size_in_bytes * BITS_PER_UNIT);
  }

  static int cmp (const byte_range &br1, const byte_range &br2);

  byte_offset_t m_start_byte_offset;
  byte_size_t m_size_in_bytes;
};
337 | ||
808f4dfe DM |
338 | /* Concrete subclass of binding_key, for describing a concrete range of |
339 | bits within the binding_map (e.g. "bits 8-15"). */ | |
340 | ||
class concrete_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef concrete_binding key_t;

  concrete_binding (bit_offset_t start_bit_offset, bit_size_t size_in_bits)
  : m_bit_range (start_bit_offset, size_in_bits)
  {}
  bool concrete_p () const FINAL OVERRIDE { return true; }

  hashval_t hash () const
  {
    inchash::hash hstate;
    hstate.add_wide_int (m_bit_range.m_start_bit_offset);
    hstate.add_wide_int (m_bit_range.m_size_in_bits);
    return hstate.end ();
  }
  bool operator== (const concrete_binding &other) const
  {
    return m_bit_range == other.m_bit_range;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const FINAL OVERRIDE;

  const concrete_binding *dyn_cast_concrete_binding () const FINAL OVERRIDE
  { return this; }

  const bit_range &get_bit_range () const { return m_bit_range; }

  bit_offset_t get_start_bit_offset () const
  {
    return m_bit_range.m_start_bit_offset;
  }
  bit_size_t get_size_in_bits () const
  {
    return m_bit_range.m_size_in_bits;
  }
  /* Return the next bit offset after the end of this binding.  */
  bit_offset_t get_next_bit_offset () const
  {
    return m_bit_range.get_next_bit_offset ();
  }

  bool overlaps_p (const concrete_binding &other) const;

  static int cmp_ptr_ptr (const void *, const void *);

  /* Hash-table support: the "deleted" and "empty" states are encoded
     via start offsets that can't occur in a real binding.  */
  void mark_deleted () { m_bit_range.m_start_bit_offset = -1; }
  void mark_empty () { m_bit_range.m_start_bit_offset = -2; }
  bool is_deleted () const { return m_bit_range.m_start_bit_offset == -1; }
  bool is_empty () const { return m_bit_range.m_start_bit_offset == -2; }

private:
  bit_range m_bit_range;
};
397 | ||
398 | } // namespace ana | |
399 | ||
template <> struct default_hash_traits<ana::concrete_binding>
: public member_function_hash_traits<ana::concrete_binding>
{
  /* The "empty" state is marked via a start offset of -2
     (see concrete_binding::mark_empty), which is not all-zero bits.  */
  static const bool empty_zero_p = false;
};
405 | ||
406 | namespace ana { | |
407 | ||
408 | /* Concrete subclass of binding_key, for describing a symbolic set of | |
409 | bits within the binding_map in terms of a region (e.g. "arr[i]"). */ | |
410 | ||
class symbolic_binding : public binding_key
{
public:
  /* This class is its own key for the purposes of consolidation.  */
  typedef symbolic_binding key_t;

  symbolic_binding (const region *region) : m_region (region) {}
  bool concrete_p () const FINAL OVERRIDE { return false; }

  hashval_t hash () const
  {
    return (intptr_t)m_region;
  }
  bool operator== (const symbolic_binding &other) const
  {
    return m_region == other.m_region;
  }

  void dump_to_pp (pretty_printer *pp, bool simple) const FINAL OVERRIDE;

  const symbolic_binding *dyn_cast_symbolic_binding () const FINAL OVERRIDE
  { return this; }

  const region *get_region () const { return m_region; }

  static int cmp_ptr_ptr (const void *, const void *);

  /* Hash-table support: NULL marks the "empty" state; the
     otherwise-impossible pointer value 1 marks "deleted".  */
  void mark_deleted () { m_region = reinterpret_cast<const region *> (1); }
  void mark_empty () { m_region = NULL; }
  bool is_deleted () const
  { return m_region == reinterpret_cast<const region *> (1); }
  bool is_empty () const { return m_region == NULL; }

private:
  const region *m_region;
};
447 | ||
448 | } // namespace ana | |
449 | ||
template <> struct default_hash_traits<ana::symbolic_binding>
: public member_function_hash_traits<ana::symbolic_binding>
{
  /* The "empty" state is a NULL region (see symbolic_binding::mark_empty),
     i.e. all-zero bits.  */
  static const bool empty_zero_p = true;
};
455 | ||
456 | namespace ana { | |
457 | ||
458 | /* A mapping from binding_keys to svalues, for use by binding_cluster | |
459 | and compound_svalue. */ | |
460 | ||
class binding_map
{
public:
  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_map () : m_map () {}
  binding_map (const binding_map &other);
  binding_map& operator=(const binding_map &other);

  bool operator== (const binding_map &other) const;
  bool operator!= (const binding_map &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  /* Return the svalue bound to KEY, or NULL if KEY has no binding.  */
  const svalue *get (const binding_key *key) const
  {
    const svalue **slot = const_cast<map_t &> (m_map).get (key);
    if (slot)
      return *slot;
    else
      return NULL;
  }
  /* Bind K to V, which must be non-NULL (use remove to drop a binding).  */
  bool put (const binding_key *k, const svalue *v)
  {
    gcc_assert (v);
    return m_map.put (k, v);
  }

  void remove (const binding_key *k) { m_map.remove (k); }
  void empty () { m_map.empty (); }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }
  size_t elements () const { return m_map.elements (); }

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  json::object *to_json () const;

  /* Populate this map from the CONSTRUCTOR node CTOR, relative to
     PARENT_REG; see the implementation for the failure conditions.  */
  bool apply_ctor_to_region (const region *parent_reg, tree ctor,
			     region_model_manager *mgr);

  static int cmp (const binding_map &map1, const binding_map &map2);

  void remove_overlapping_bindings (store_manager *mgr,
				    const binding_key *drop_key,
				    uncertainty_t *uncertainty);

private:
  void get_overlapping_bindings (const binding_key *key,
				 auto_vec<const binding_key *> *out);
  /* Helpers for apply_ctor_to_region.  */
  bool apply_ctor_val_to_range (const region *parent_reg,
				region_model_manager *mgr,
				tree min_index, tree max_index,
				tree val);
  bool apply_ctor_pair_to_child_region (const region *parent_reg,
					region_model_manager *mgr,
					tree index, tree val);

  map_t m_map;
};
527 | ||
528 | /* Concept: BindingVisitor, for use by binding_cluster::for_each_binding | |
529 | and store::for_each_binding. | |
530 | ||
531 | Should implement: | |
532 | void on_binding (const binding_key *key, const svalue *&sval); | |
533 | */ | |
534 | ||
535 | /* All of the bindings within a store for regions that share the same | |
536 | base region. */ | |
537 | ||
class binding_cluster
{
public:
  friend class store;

  typedef hash_map <const binding_key *, const svalue *> map_t;
  typedef map_t::iterator iterator_t;

  binding_cluster (const region *base_region)
  : m_base_region (base_region), m_map (),
    m_escaped (false), m_touched (false) {}
  binding_cluster (const binding_cluster &other);
  binding_cluster& operator=(const binding_cluster &other);

  bool operator== (const binding_cluster &other) const;
  bool operator!= (const binding_cluster &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  bool symbolic_p () const;

  void dump_to_pp (pretty_printer *pp, bool simple, bool multiline) const;
  void dump (bool simple) const;

  void validate () const;

  json::object *to_json () const;

  void bind (store_manager *mgr, const region *, const svalue *);

  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr, const region *reg,
			       uncertainty_t *uncertainty);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  const svalue *get_binding (store_manager *mgr, const region *reg) const;
  const svalue *get_binding_recursive (store_manager *mgr,
				       const region *reg) const;
  const svalue *get_any_binding (store_manager *mgr,
				 const region *reg) const;
  const svalue *maybe_get_compound_binding (store_manager *mgr,
					    const region *reg) const;

  void remove_overlapping_bindings (store_manager *mgr, const region *reg,
				    uncertainty_t *uncertainty);

  /* Call CB, passing USER_DATA, on each svalue bound in this cluster.  */
  template <typename T>
  void for_each_value (void (*cb) (const svalue *sval, T user_data),
		       T user_data) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      cb ((*iter).second, user_data);
  }

  static bool can_merge_p (const binding_cluster *cluster_a,
			   const binding_cluster *cluster_b,
			   binding_cluster *out_cluster,
			   store *out_store,
			   store_manager *mgr,
			   model_merger *merger);
  void make_unknown_relative_to (const binding_cluster *other_cluster,
				 store *out_store,
				 store_manager *mgr);

  void mark_as_escaped ();
  void on_unknown_fncall (const gcall *call, store_manager *mgr);
  void on_asm (const gasm *stmt, store_manager *mgr);

  /* Accessors for the flags; see the fields below for their meaning.  */
  bool escaped_p () const { return m_escaped; }
  bool touched_p () const { return m_touched; }

  bool redundant_p () const;
  bool empty_p () const { return m_map.elements () == 0; }

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const region *base_reg,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  const svalue *maybe_get_simple_value (store_manager *mgr) const;

  /* Call V.on_binding on each key/svalue pair in this cluster
     (see the BindingVisitor concept above).  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v) const
  {
    for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
      {
	const binding_key *key = (*iter).first;
	const svalue *&sval = (*iter).second;
	v.on_binding (key, sval);
      }
  }

  iterator_t begin () const { return m_map.begin (); }
  iterator_t end () const { return m_map.end (); }

  const binding_map &get_map () const { return m_map; }

private:
  const svalue *get_any_value (const binding_key *key) const;
  void bind_compound_sval (store_manager *mgr,
			   const region *reg,
			   const compound_svalue *compound_sval);
  void bind_key (const binding_key *key, const svalue *sval);

  /* The base region shared by all regions bound within this cluster.  */
  const region *m_base_region;

  binding_map m_map;

  /* Has a pointer to this cluster "escaped" into a part of the program
     we don't know about (via a call to a function with an unknown body,
     or by being passed in as a pointer param of a "top-level" function call).
     Such regions could be overwritten when other such functions are called,
     even if the region is no longer reachable by pointers that we are
     tracking.  */
  bool m_escaped;

  /* Has this cluster been written to via a symbolic binding?
     If so, then we don't know anything about unbound subregions,
     so we can't use initial_svalue, treat them as uninitialized, or
     inherit values from a parent region.  */
  bool m_touched;
};
668 | ||
669 | /* The mapping from regions to svalues. | |
670 | This is actually expressed by subdividing into clusters, to better | |
671 | handle aliasing. */ | |
672 | ||
class store
{
public:
  typedef hash_map <const region *, binding_cluster *> cluster_map_t;

  store ();
  store (const store &other);
  ~store ();

  store &operator= (const store &other);

  bool operator== (const store &other) const;
  bool operator!= (const store &other) const
  {
    return !(*this == other);
  }

  hashval_t hash () const;

  void dump_to_pp (pretty_printer *pp, bool summarize, bool multiline,
		   store_manager *mgr) const;
  void dump (bool simple) const;
  void summarize_to_pp (pretty_printer *pp, bool simple) const;

  void validate () const;

  json::object *to_json () const;

  const svalue *get_any_binding (store_manager *mgr, const region *reg) const;

  /* See the comment on m_called_unknown_fn below.  */
  bool called_unknown_fn_p () const { return m_called_unknown_fn; }

  void set_value (store_manager *mgr, const region *lhs_reg,
		  const svalue *rhs_sval,
		  uncertainty_t *uncertainty);
  void clobber_region (store_manager *mgr, const region *reg);
  void purge_region (store_manager *mgr, const region *reg);
  void fill_region (store_manager *mgr, const region *reg, const svalue *sval);
  void zero_fill_region (store_manager *mgr, const region *reg);
  void mark_region_as_unknown (store_manager *mgr, const region *reg,
			       uncertainty_t *uncertainty);
  void purge_state_involving (const svalue *sval,
			      region_model_manager *sval_mgr);

  const binding_cluster *get_cluster (const region *base_reg) const;
  binding_cluster *get_cluster (const region *base_reg);
  binding_cluster *get_or_create_cluster (const region *base_reg);
  void purge_cluster (const region *base_reg);

  /* Call CB, passing USER_DATA, on the base region of each cluster.  */
  template <typename T>
  void for_each_cluster (void (*cb) (const region *base_reg, T user_data),
			 T user_data) const
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      cb ((*iter).first, user_data);
  }

  static bool can_merge_p (const store *store_a, const store *store_b,
			   store *out_store, store_manager *mgr,
			   model_merger *merger);

  void mark_as_escaped (const region *base_reg);
  void on_unknown_fncall (const gcall *call, store_manager *mgr);
  bool escaped_p (const region *reg) const;

  void get_representative_path_vars (const region_model *model,
				     svalue_set *visited,
				     const svalue *sval,
				     auto_vec<path_var> *out_pvs) const;

  cluster_map_t::iterator begin () const { return m_cluster_map.begin (); }
  cluster_map_t::iterator end () const { return m_cluster_map.end (); }

  tristate eval_alias (const region *base_reg_a,
		       const region *base_reg_b) const;

  /* Call V.on_binding on every binding in every cluster.  */
  template <typename BindingVisitor>
  void for_each_binding (BindingVisitor &v)
  {
    for (cluster_map_t::iterator iter = m_cluster_map.begin ();
	 iter != m_cluster_map.end (); ++iter)
      (*iter).second->for_each_binding (v);
  }

  void canonicalize (store_manager *mgr);
  void loop_replay_fixup (const store *other_store,
			  region_model_manager *mgr);

private:
  void remove_overlapping_bindings (store_manager *mgr, const region *reg);
  tristate eval_alias_1 (const region *base_reg_a,
			 const region *base_reg_b) const;

  cluster_map_t m_cluster_map;

  /* If this is true, then unknown code has been called, and so
     any global variable that isn't currently modelled by the store
     has unknown state, rather than being in an "initial state".
     This is to avoid having to mark (and thus explicitly track)
     every global when an unknown function is called; instead, they
     can be tracked implicitly.  */
  bool m_called_unknown_fn;
};
777 | ||
778 | /* A class responsible for owning and consolidating binding keys | |
779 | (both concrete and symbolic). | |
780 | Key instances are immutable as far as clients are concerned, so they | |
781 | are provided as "const" ptrs. */ | |
782 | ||
class store_manager
{
public:
  store_manager (region_model_manager *mgr) : m_mgr (mgr) {}

  /* binding consolidation.  */
  const concrete_binding *
  get_concrete_binding (bit_offset_t start_bit_offset,
			bit_offset_t size_in_bits);
  /* Overload taking a bit_range, forwarding to the above.  */
  const concrete_binding *
  get_concrete_binding (const bit_range &bits)
  {
    return get_concrete_binding (bits.get_start_bit_offset (),
				 bits.m_size_in_bits);
  }
  const symbolic_binding *
  get_symbolic_binding (const region *region);

  region_model_manager *get_svalue_manager () const
  {
    return m_mgr;
  }

  void log_stats (logger *logger, bool show_objs) const;

private:
  region_model_manager *m_mgr;
  /* Consolidated key instances, so that equal keys share one object.  */
  consolidation_map<concrete_binding> m_concrete_binding_key_mgr;
  consolidation_map<symbolic_binding> m_symbolic_binding_key_mgr;
};
813 | ||
814 | } // namespace ana | |
815 | ||
816 | #endif /* GCC_ANALYZER_STORE_H */ |