1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "options.h"
29 #include "bitmap.h"
30 #include "diagnostic-path.h"
31 #include "diagnostic-metadata.h"
32 #include "function.h"
33 #include "json.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "tristate.h"
40 #include "selftest.h"
41 #include "analyzer/call-string.h"
42 #include "analyzer/program-point.h"
43 #include "analyzer/store.h"
44 #include "analyzer/region-model.h"
45 #include "stringpool.h"
46 #include "attribs.h"
47 #include "analyzer/function-set.h"
48 #include "analyzer/program-state.h"
49
50 #if ENABLE_ANALYZER
51
52 namespace ana {
53
54 namespace {
55
56 /* This state machine and its various support classes track allocations
57 and deallocations.
58
59 It has a few standard allocation/deallocation pairs (e.g. new/delete),
60 and also supports user-defined ones via
61 __attribute__ ((malloc(DEALLOCATOR))).
62
63 There can be more than one valid deallocator for a given allocator,
64 for example:
65 __attribute__ ((malloc (fclose)))
66 __attribute__ ((malloc (freopen, 3)))
67 FILE* fopen (const char*, const char*);
68 A deallocator_set represents a particular set of valid deallocators.
69
70 We track the expected deallocator_set for a value, but not the allocation
71 function - there could be more than one allocator per deallocator_set.
72 For example, there could be dozens of allocators for "free" beyond just
73    malloc, e.g. calloc, xstrdup, etc.  We don't want to explode the number
74 of states by tracking individual allocators in the exploded graph;
75 we merely want to track "this value expects to have 'free' called on it".
76 Perhaps we can reconstruct which allocator was used later, when emitting
77 the path, if it's necessary for precision of wording of diagnostics. */
78
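/* As an illustrative sketch of the user-facing side (the names my_alloc
   and my_free below are hypothetical, not taken from this file):

     void my_free (void *p);
     __attribute__ ((malloc (my_free)))
     void *my_alloc (size_t sz);

   A value returned by my_alloc is given a deallocator_set containing only
   my_free; passing it to "free" instead would be reported as a mismatching
   deallocation (-Wanalyzer-mismatching-deallocation), and never
   deallocating it would be reported as a leak.  */
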
79 class deallocator;
80 class deallocator_set;
81 class malloc_state_machine;
82
83 /* An enum for discriminating between different kinds of allocation_state. */
84
85 enum resource_state
86 {
87 /* States that are independent of allocator/deallocator. */
88
89 /* The start state. */
90 RS_START,
91
92 /* State for a pointer that's known to be NULL. */
93 RS_NULL,
94
95 /* State for a pointer that's known to not be on the heap (e.g. to a local
96 or global). */
97 RS_NON_HEAP,
98
99 /* Stop state, for pointers we don't want to track any more. */
100 RS_STOP,
101
102 /* States that relate to a specific deallocator_set. */
103
104 /* State for a pointer returned from an allocator that hasn't
105 been checked for NULL.
106 It could be a pointer to heap-allocated memory, or could be NULL. */
107 RS_UNCHECKED,
108
109 /* State for a pointer returned from an allocator,
110 known to be non-NULL. */
111 RS_NONNULL,
112
113 /* State for a pointer passed to a deallocator. */
114 RS_FREED
115 };
116
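/* A minimal sketch of how these states map onto a typical path
   (example code only, not from this file):

     void *p = malloc (16);      start -> RS_UNCHECKED
     if (p == NULL)
       return;                   on this edge RS_UNCHECKED -> RS_NULL;
                                 otherwise    RS_UNCHECKED -> RS_NONNULL
     free (p);                   RS_NONNULL -> RS_FREED  */
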
117 /* Custom state subclass, which can optionally refer to a
118 deallocator_set. */
119
120 struct allocation_state : public state_machine::state
121 {
122 allocation_state (const char *name, unsigned id,
123 enum resource_state rs,
124 const deallocator_set *deallocators,
125 const deallocator *deallocator)
126 : state (name, id), m_rs (rs),
127 m_deallocators (deallocators),
128 m_deallocator (deallocator)
129 {}
130
131 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
132
133 const allocation_state *get_nonnull () const;
134
135 enum resource_state m_rs;
136 const deallocator_set *m_deallocators;
137 const deallocator *m_deallocator;
138 };
139
140 /* An enum for choosing which wording to use in various diagnostics
141 when describing deallocations. */
142
143 enum wording
144 {
145 WORDING_FREED,
146 WORDING_DELETED,
147 WORDING_DEALLOCATED,
148 WORDING_REALLOCATED
149 };
150
151 /* Base class representing a deallocation function,
152 either a built-in one we know about, or one exposed via
153 __attribute__((malloc(DEALLOCATOR))). */
154
155 struct deallocator
156 {
157 hashval_t hash () const;
158 void dump_to_pp (pretty_printer *pp) const;
159 static int cmp (const deallocator *a, const deallocator *b);
160 static int cmp_ptr_ptr (const void *, const void *);
161
162 /* Name to use in diagnostics. */
163 const char *m_name;
164
165 /* Which wording to use in diagnostics. */
166 enum wording m_wording;
167
168 /* State for a value passed to one of the deallocators. */
169 state_machine::state_t m_freed;
170
171 protected:
172 deallocator (malloc_state_machine *sm,
173 const char *name,
174 enum wording wording);
175 };
176
177 /* Subclass representing a predefined deallocator.
178 e.g. "delete []", without needing a specific FUNCTION_DECL
179 ahead of time. */
180
181 struct standard_deallocator : public deallocator
182 {
183 standard_deallocator (malloc_state_machine *sm,
184 const char *name,
185 enum wording wording);
186 };
187
188 /* Subclass representing a user-defined deallocator
189 via __attribute__((malloc(DEALLOCATOR))) given
190 a specific FUNCTION_DECL. */
191
192 struct custom_deallocator : public deallocator
193 {
194 custom_deallocator (malloc_state_machine *sm,
195 tree deallocator_fndecl,
196 enum wording wording)
197 : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
198 wording)
199 {
200 }
201 };
202
203 /* Base class representing a set of possible deallocators.
204 Often this will be just a single deallocator, but some
205 allocators have multiple valid deallocators (e.g. the result of
206 "fopen" can be closed by either "fclose" or "freopen"). */
207
208 struct deallocator_set
209 {
210 deallocator_set (malloc_state_machine *sm,
211 enum wording wording);
212 virtual ~deallocator_set () {}
213
214 virtual bool contains_p (const deallocator *d) const = 0;
215 virtual const deallocator *maybe_get_single () const = 0;
216 virtual void dump_to_pp (pretty_printer *pp) const = 0;
217 void dump () const;
218
219 /* Which wording to use in diagnostics. */
220 enum wording m_wording;
221
222 /* Pointers to states.
223 These states are owned by the state_machine base class. */
224
225 /* State for an unchecked result from an allocator using this set. */
226 state_machine::state_t m_unchecked;
227
228 /* State for a known non-NULL result from such an allocator. */
229 state_machine::state_t m_nonnull;
230 };
231
232 /* Subclass of deallocator_set representing a set of deallocators
233 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
234
235 struct custom_deallocator_set : public deallocator_set
236 {
237 typedef const auto_vec <const deallocator *> *key_t;
238
239 custom_deallocator_set (malloc_state_machine *sm,
240 const auto_vec <const deallocator *> *vec,
241 //const char *name,
242 //const char *dealloc_funcname,
243 //unsigned arg_idx,
244 enum wording wording);
245
246 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
247 const deallocator *maybe_get_single () const FINAL OVERRIDE;
248 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
249
250 auto_vec <const deallocator *> m_deallocator_vec;
251 };
252
253 /* Subclass of deallocator_set representing a set of deallocators
254 with a single standard_deallocator, e.g. "delete []". */
255
256 struct standard_deallocator_set : public deallocator_set
257 {
258 standard_deallocator_set (malloc_state_machine *sm,
259 const char *name,
260 enum wording wording);
261
262 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
263 const deallocator *maybe_get_single () const FINAL OVERRIDE;
264 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
265
266 standard_deallocator m_deallocator;
267 };
268
269 /* Traits class for ensuring uniqueness of deallocator_sets within
270 malloc_state_machine. */
271
272 struct deallocator_set_map_traits
273 {
274 typedef custom_deallocator_set::key_t key_type;
275 typedef custom_deallocator_set *value_type;
276 typedef custom_deallocator_set *compare_type;
277
278 static inline hashval_t hash (const key_type &k)
279 {
280 gcc_assert (k != NULL);
281 gcc_assert (k != reinterpret_cast<key_type> (1));
282
283 hashval_t result = 0;
284 unsigned i;
285 const deallocator *d;
286 FOR_EACH_VEC_ELT (*k, i, d)
287 result ^= d->hash ();
288 return result;
289 }
290 static inline bool equal_keys (const key_type &k1, const key_type &k2)
291 {
292 if (k1->length () != k2->length ())
293 return false;
294
295 for (unsigned i = 0; i < k1->length (); i++)
296 if ((*k1)[i] != (*k2)[i])
297 return false;
298
299 return true;
300 }
301 template <typename T>
302 static inline void remove (T &)
303 {
304 /* empty; the nodes are handled elsewhere. */
305 }
306 template <typename T>
307 static inline void mark_deleted (T &entry)
308 {
309 entry.m_key = reinterpret_cast<key_type> (1);
310 }
311 template <typename T>
312 static inline void mark_empty (T &entry)
313 {
314 entry.m_key = NULL;
315 }
316 template <typename T>
317 static inline bool is_deleted (const T &entry)
318 {
319 return entry.m_key == reinterpret_cast<key_type> (1);
320 }
321 template <typename T>
322 static inline bool is_empty (const T &entry)
323 {
324 return entry.m_key == NULL;
325 }
326 static const bool empty_zero_p = false;
327 };
328
329 /* A state machine for detecting misuses of the malloc/free API.
330
331 See sm-malloc.dot for an overview (keep this in-sync with that file). */
332
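/* A hedged sketch of the kind of misuse this machine diagnoses
   (example code only, not from this file):

     void *p = malloc (16);
     free (p);                unchecked/nonnull -> freed
     free (p);                freed -> stop, -Wanalyzer-double-free

   Reading or writing through p after the first free would similarly be
   reported as -Wanalyzer-use-after-free.  */
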
333 class malloc_state_machine : public state_machine
334 {
335 public:
336 typedef allocation_state custom_data_t;
337
338 malloc_state_machine (logger *logger);
339 ~malloc_state_machine ();
340
341 state_t
342 add_state (const char *name, enum resource_state rs,
343 const deallocator_set *deallocators,
344 const deallocator *deallocator);
345
346 bool inherited_state_p () const FINAL OVERRIDE { return false; }
347
348 state_machine::state_t
349 get_default_state (const svalue *sval) const FINAL OVERRIDE
350 {
351 if (tree cst = sval->maybe_get_constant ())
352 {
353 if (zerop (cst))
354 return m_null;
355 }
356 if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
357 {
358 const region *reg = ptr->get_pointee ();
359 switch (reg->get_memory_space ())
360 {
361 default:
362 break;
363 case MEMSPACE_CODE:
364 case MEMSPACE_GLOBALS:
365 case MEMSPACE_STACK:
366 case MEMSPACE_READONLY_DATA:
367 return m_non_heap;
368 }
369 }
370 return m_start;
371 }
372
373 bool on_stmt (sm_context *sm_ctxt,
374 const supernode *node,
375 const gimple *stmt) const FINAL OVERRIDE;
376
377 void on_phi (sm_context *sm_ctxt,
378 const supernode *node,
379 const gphi *phi,
380 tree rhs) const FINAL OVERRIDE;
381
382 void on_condition (sm_context *sm_ctxt,
383 const supernode *node,
384 const gimple *stmt,
385 const svalue *lhs,
386 enum tree_code op,
387 const svalue *rhs) const FINAL OVERRIDE;
388
389 bool can_purge_p (state_t s) const FINAL OVERRIDE;
390 pending_diagnostic *on_leak (tree var) const FINAL OVERRIDE;
391
392 bool reset_when_passed_to_unknown_fn_p (state_t s,
393 bool is_mutable) const FINAL OVERRIDE;
394
395 static bool unaffected_by_call_p (tree fndecl);
396
397 void on_realloc_with_move (region_model *model,
398 sm_state_map *smap,
399 const svalue *old_ptr_sval,
400 const svalue *new_ptr_sval,
401 const extrinsic_state &ext_state) const;
402
403 standard_deallocator_set m_free;
404 standard_deallocator_set m_scalar_delete;
405 standard_deallocator_set m_vector_delete;
406
407 standard_deallocator m_realloc;
408
409 /* States that are independent of api. */
410
411 /* State for a pointer that's known to be NULL. */
412 state_t m_null;
413
414 /* State for a pointer that's known to not be on the heap (e.g. to a local
415 or global). */
416 state_t m_non_heap; // TODO: or should this be a different state machine?
417 // or do we need child values etc?
418
419 /* Stop state, for pointers we don't want to track any more. */
420 state_t m_stop;
421
422 private:
423 const custom_deallocator_set *
424 get_or_create_custom_deallocator_set (tree allocator_fndecl);
425 custom_deallocator_set *
426 maybe_create_custom_deallocator_set (tree allocator_fndecl);
427 const deallocator *
428 get_or_create_deallocator (tree deallocator_fndecl);
429
430 void on_allocator_call (sm_context *sm_ctxt,
431 const gcall *call,
432 const deallocator_set *deallocators,
433 bool returns_nonnull = false) const;
434 void handle_free_of_non_heap (sm_context *sm_ctxt,
435 const supernode *node,
436 const gcall *call,
437 tree arg,
438 const deallocator *d) const;
439 void on_deallocator_call (sm_context *sm_ctxt,
440 const supernode *node,
441 const gcall *call,
442 const deallocator *d,
443 unsigned argno) const;
444 void on_realloc_call (sm_context *sm_ctxt,
445 const supernode *node,
446 const gcall *call) const;
447 void on_zero_assignment (sm_context *sm_ctxt,
448 const gimple *stmt,
449 tree lhs) const;
450
451 /* A map for consolidating deallocators so that they are
452 unique per deallocator FUNCTION_DECL. */
453 typedef hash_map<tree, deallocator *> deallocator_map_t;
454 deallocator_map_t m_deallocator_map;
455
456 /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *. */
457 typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
458 deallocator_set_cache_t m_custom_deallocator_set_cache;
459
460 /* A map for consolidating custom_deallocator_set instances. */
461 typedef hash_map<custom_deallocator_set::key_t,
462 custom_deallocator_set *,
463 deallocator_set_map_traits> custom_deallocator_set_map_t;
464 custom_deallocator_set_map_t m_custom_deallocator_set_map;
465
466 /* Record of dynamically-allocated objects, for cleanup. */
467 auto_vec <custom_deallocator_set *> m_dynamic_sets;
468 auto_vec <custom_deallocator *> m_dynamic_deallocators;
469 };
470
471 /* struct deallocator. */
472
473 deallocator::deallocator (malloc_state_machine *sm,
474 const char *name,
475 enum wording wording)
476 : m_name (name),
477 m_wording (wording),
478 m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
479 {
480 }
481
482 hashval_t
483 deallocator::hash () const
484 {
485 return (hashval_t)m_freed->get_id ();
486 }
487
488 void
489 deallocator::dump_to_pp (pretty_printer *pp) const
490 {
491 pp_printf (pp, "%qs", m_name);
492 }
493
494 int
495 deallocator::cmp (const deallocator *a, const deallocator *b)
496 {
497 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
498 }
499
500 int
501 deallocator::cmp_ptr_ptr (const void *a, const void *b)
502 {
503 return cmp (*(const deallocator * const *)a,
504 *(const deallocator * const *)b);
505 }
506
507
508 /* struct standard_deallocator : public deallocator. */
509
510 standard_deallocator::standard_deallocator (malloc_state_machine *sm,
511 const char *name,
512 enum wording wording)
513 : deallocator (sm, name, wording)
514 {
515 }
516
517 /* struct deallocator_set. */
518
519 deallocator_set::deallocator_set (malloc_state_machine *sm,
520 enum wording wording)
521 : m_wording (wording),
522 m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
523 m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
524 {
525 }
526
527 /* Dump a description of this deallocator_set to stderr. */
528
529 DEBUG_FUNCTION void
530 deallocator_set::dump () const
531 {
532 pretty_printer pp;
533 pp_show_color (&pp) = pp_show_color (global_dc->printer);
534 pp.buffer->stream = stderr;
535 dump_to_pp (&pp);
536 pp_newline (&pp);
537 pp_flush (&pp);
538 }
539
540 /* struct custom_deallocator_set : public deallocator_set. */
541
542 custom_deallocator_set::
543 custom_deallocator_set (malloc_state_machine *sm,
544 const auto_vec <const deallocator *> *vec,
545 enum wording wording)
546 : deallocator_set (sm, wording),
547 m_deallocator_vec (vec->length ())
548 {
549 unsigned i;
550 const deallocator *d;
551 FOR_EACH_VEC_ELT (*vec, i, d)
552 m_deallocator_vec.safe_push (d);
553 }
554
555 bool
556 custom_deallocator_set::contains_p (const deallocator *d) const
557 {
558 unsigned i;
559 const deallocator *cd;
560 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
561 if (cd == d)
562 return true;
563 return false;
564 }
565
566 const deallocator *
567 custom_deallocator_set::maybe_get_single () const
568 {
569 if (m_deallocator_vec.length () == 1)
570 return m_deallocator_vec[0];
571 return NULL;
572 }
573
574 void
575 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
576 {
577 pp_character (pp, '{');
578 unsigned i;
579 const deallocator *d;
580 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
581 {
582 if (i > 0)
583 pp_string (pp, ", ");
584 d->dump_to_pp (pp);
585 }
586 pp_character (pp, '}');
587 }
588
589 /* struct standard_deallocator_set : public deallocator_set. */
590
591 standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
592 const char *name,
593 enum wording wording)
594 : deallocator_set (sm, wording),
595 m_deallocator (sm, name, wording)
596 {
597 }
598
599 bool
600 standard_deallocator_set::contains_p (const deallocator *d) const
601 {
602 return d == &m_deallocator;
603 }
604
605 const deallocator *
606 standard_deallocator_set::maybe_get_single () const
607 {
608 return &m_deallocator;
609 }
610
611 void
612 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
613 {
614 pp_character (pp, '{');
615 pp_string (pp, m_deallocator.m_name);
616 pp_character (pp, '}');
617 }
618
619 /* Return STATE cast to the custom state subclass, or NULL for the start state.
620 Everything should be an allocation_state apart from the start state. */
621
622 static const allocation_state *
623 dyn_cast_allocation_state (state_machine::state_t state)
624 {
625 if (state->get_id () == 0)
626 return NULL;
627 return static_cast <const allocation_state *> (state);
628 }
629
630 /* Return STATE cast to the custom state subclass, for a state that is
631    already known not to be the start state. */
632
633 static const allocation_state *
634 as_a_allocation_state (state_machine::state_t state)
635 {
636 gcc_assert (state->get_id () != 0);
637 return static_cast <const allocation_state *> (state);
638 }
639
640 /* Get the resource_state for STATE. */
641
642 static enum resource_state
643 get_rs (state_machine::state_t state)
644 {
645 if (const allocation_state *astate = dyn_cast_allocation_state (state))
646 return astate->m_rs;
647 else
648 return RS_START;
649 }
650
651 /* Return true if STATE is the start state. */
652
653 static bool
654 start_p (state_machine::state_t state)
655 {
656 return get_rs (state) == RS_START;
657 }
658
659 /* Return true if STATE is an unchecked result from an allocator. */
660
661 static bool
662 unchecked_p (state_machine::state_t state)
663 {
664 return get_rs (state) == RS_UNCHECKED;
665 }
666
667 /* Return true if STATE is a non-null result from an allocator. */
668
669 static bool
670 nonnull_p (state_machine::state_t state)
671 {
672 return get_rs (state) == RS_NONNULL;
673 }
674
675 /* Return true if STATE is a value that has been passed to a deallocator. */
676
677 static bool
678 freed_p (state_machine::state_t state)
679 {
680 return get_rs (state) == RS_FREED;
681 }
682
683 /* Class for diagnostics relating to malloc_state_machine. */
684
685 class malloc_diagnostic : public pending_diagnostic
686 {
687 public:
688 malloc_diagnostic (const malloc_state_machine &sm, tree arg)
689 : m_sm (sm), m_arg (arg)
690 {}
691
692 bool subclass_equal_p (const pending_diagnostic &base_other) const OVERRIDE
693 {
694 return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
695 }
696
697 label_text describe_state_change (const evdesc::state_change &change)
698 OVERRIDE
699 {
700 if (change.m_old_state == m_sm.get_start_state ()
701 && unchecked_p (change.m_new_state))
702 // TODO: verify that it's the allocation stmt, not a copy
703 return label_text::borrow ("allocated here");
704 if (unchecked_p (change.m_old_state)
705 && nonnull_p (change.m_new_state))
706 {
707 if (change.m_expr)
708 return change.formatted_print ("assuming %qE is non-NULL",
709 change.m_expr);
710 else
711 return change.formatted_print ("assuming %qs is non-NULL",
712 "<unknown>");
713 }
714 if (change.m_new_state == m_sm.m_null)
715 {
716 if (unchecked_p (change.m_old_state))
717 {
718 if (change.m_expr)
719 return change.formatted_print ("assuming %qE is NULL",
720 change.m_expr);
721 else
722 return change.formatted_print ("assuming %qs is NULL",
723 "<unknown>");
724 }
725 else
726 {
727 if (change.m_expr)
728 return change.formatted_print ("%qE is NULL",
729 change.m_expr);
730 else
731 return change.formatted_print ("%qs is NULL",
732 "<unknown>");
733 }
734 }
735
736 return label_text ();
737 }
738
739 protected:
740 const malloc_state_machine &m_sm;
741 tree m_arg;
742 };
743
744 /* Concrete subclass for reporting mismatching allocator/deallocator
745 diagnostics. */
746
747 class mismatching_deallocation : public malloc_diagnostic
748 {
749 public:
750 mismatching_deallocation (const malloc_state_machine &sm, tree arg,
751 const deallocator_set *expected_deallocators,
752 const deallocator *actual_dealloc)
753 : malloc_diagnostic (sm, arg),
754 m_expected_deallocators (expected_deallocators),
755 m_actual_dealloc (actual_dealloc)
756 {}
757
758 const char *get_kind () const FINAL OVERRIDE
759 {
760 return "mismatching_deallocation";
761 }
762
763 int get_controlling_option () const FINAL OVERRIDE
764 {
765 return OPT_Wanalyzer_mismatching_deallocation;
766 }
767
768 bool emit (rich_location *rich_loc) FINAL OVERRIDE
769 {
770 auto_diagnostic_group d;
771 diagnostic_metadata m;
772 m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines. */
773 if (const deallocator *expected_dealloc
774 = m_expected_deallocators->maybe_get_single ())
775 return warning_meta (rich_loc, m, get_controlling_option (),
776 "%qE should have been deallocated with %qs"
777 " but was deallocated with %qs",
778 m_arg, expected_dealloc->m_name,
779 m_actual_dealloc->m_name);
780 else
781 return warning_meta (rich_loc, m, get_controlling_option (),
782 "%qs called on %qE returned from a mismatched"
783 " allocation function",
784 m_actual_dealloc->m_name, m_arg);
785 }
786
787 label_text describe_state_change (const evdesc::state_change &change)
788 FINAL OVERRIDE
789 {
790 if (unchecked_p (change.m_new_state))
791 {
792 m_alloc_event = change.m_event_id;
793 if (const deallocator *expected_dealloc
794 = m_expected_deallocators->maybe_get_single ())
795 return change.formatted_print ("allocated here"
796 " (expects deallocation with %qs)",
797 expected_dealloc->m_name);
798 else
799 return change.formatted_print ("allocated here");
800 }
801 return malloc_diagnostic::describe_state_change (change);
802 }
803
804 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
805 {
806 if (m_alloc_event.known_p ())
807 {
808 if (const deallocator *expected_dealloc
809 = m_expected_deallocators->maybe_get_single ())
810 return ev.formatted_print
811 ("deallocated with %qs here;"
812 " allocation at %@ expects deallocation with %qs",
813 m_actual_dealloc->m_name, &m_alloc_event,
814 expected_dealloc->m_name);
815 else
816 return ev.formatted_print
817 ("deallocated with %qs here;"
818 " allocated at %@",
819 m_actual_dealloc->m_name, &m_alloc_event);
820 }
821 return ev.formatted_print ("deallocated with %qs here",
822 m_actual_dealloc->m_name);
823 }
824
825 private:
826 diagnostic_event_id_t m_alloc_event;
827 const deallocator_set *m_expected_deallocators;
828 const deallocator *m_actual_dealloc;
829 };
830
831 /* Concrete subclass for reporting double-free diagnostics. */
832
833 class double_free : public malloc_diagnostic
834 {
835 public:
836 double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
837 : malloc_diagnostic (sm, arg), m_funcname (funcname)
838 {}
839
840 const char *get_kind () const FINAL OVERRIDE { return "double_free"; }
841
842 int get_controlling_option () const FINAL OVERRIDE
843 {
844 return OPT_Wanalyzer_double_free;
845 }
846
847 bool emit (rich_location *rich_loc) FINAL OVERRIDE
848 {
849 auto_diagnostic_group d;
850 diagnostic_metadata m;
851 m.add_cwe (415); /* CWE-415: Double Free. */
852 return warning_meta (rich_loc, m, get_controlling_option (),
853 "double-%qs of %qE", m_funcname, m_arg);
854 }
855
856 label_text describe_state_change (const evdesc::state_change &change)
857 FINAL OVERRIDE
858 {
859 if (freed_p (change.m_new_state))
860 {
861 m_first_free_event = change.m_event_id;
862 return change.formatted_print ("first %qs here", m_funcname);
863 }
864 return malloc_diagnostic::describe_state_change (change);
865 }
866
867 label_text describe_call_with_state (const evdesc::call_with_state &info)
868 FINAL OVERRIDE
869 {
870 if (freed_p (info.m_state))
871 return info.formatted_print
872 ("passing freed pointer %qE in call to %qE from %qE",
873 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
874 return label_text ();
875 }
876
877 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
878 {
879 if (m_first_free_event.known_p ())
880 return ev.formatted_print ("second %qs here; first %qs was at %@",
881 m_funcname, m_funcname,
882 &m_first_free_event);
883 return ev.formatted_print ("second %qs here", m_funcname);
884 }
885
886 private:
887 diagnostic_event_id_t m_first_free_event;
888 const char *m_funcname;
889 };
890
891 /* Abstract subclass for describing possible bad uses of NULL.
892 Responsible for describing the call that could return NULL. */
893
894 class possible_null : public malloc_diagnostic
895 {
896 public:
897 possible_null (const malloc_state_machine &sm, tree arg)
898 : malloc_diagnostic (sm, arg)
899 {}
900
901 label_text describe_state_change (const evdesc::state_change &change)
902 FINAL OVERRIDE
903 {
904 if (change.m_old_state == m_sm.get_start_state ()
905 && unchecked_p (change.m_new_state))
906 {
907 m_origin_of_unchecked_event = change.m_event_id;
908 return label_text::borrow ("this call could return NULL");
909 }
910 return malloc_diagnostic::describe_state_change (change);
911 }
912
913 label_text describe_return_of_state (const evdesc::return_of_state &info)
914 FINAL OVERRIDE
915 {
916 if (unchecked_p (info.m_state))
917 return info.formatted_print ("possible return of NULL to %qE from %qE",
918 info.m_caller_fndecl, info.m_callee_fndecl);
919 return label_text ();
920 }
921
922 protected:
923 diagnostic_event_id_t m_origin_of_unchecked_event;
924 };
925
926 /* Concrete subclass for describing dereference of a possible NULL
927 value. */
928
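/* A minimal sketch of code reaching this diagnostic (example only):

     char *p = (char *) malloc (16);
     p[0] = 'x';    p is still "unchecked" here, so this is reported as
                    "dereference of possibly-NULL 'p'" (CWE-690).  */
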
929 class possible_null_deref : public possible_null
930 {
931 public:
932 possible_null_deref (const malloc_state_machine &sm, tree arg)
933 : possible_null (sm, arg)
934 {}
935
936 const char *get_kind () const FINAL OVERRIDE { return "possible_null_deref"; }
937
938 int get_controlling_option () const FINAL OVERRIDE
939 {
940 return OPT_Wanalyzer_possible_null_dereference;
941 }
942
943 bool emit (rich_location *rich_loc) FINAL OVERRIDE
944 {
945 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
946 diagnostic_metadata m;
947 m.add_cwe (690);
948 return warning_meta (rich_loc, m, get_controlling_option (),
949 "dereference of possibly-NULL %qE", m_arg);
950 }
951
952 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
953 {
954 if (m_origin_of_unchecked_event.known_p ())
955 return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
956 ev.m_expr,
957 &m_origin_of_unchecked_event);
958 else
959 return ev.formatted_print ("%qE could be NULL", ev.m_expr);
960 }
961
962 };
963
964 /* Return true if FNDECL is a C++ method. */
965
966 static bool
967 method_p (tree fndecl)
968 {
969 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
970 }
971
972 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
973 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
974 as called from cp_printer). */
975
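/* For example (an illustrative sketch, not from this file):

     void f (void *p, void *q);    arg_idx 0 -> "1", arg_idx 1 -> "2"

   whereas for a C++ member function arg_idx 0 is described as "'this'"
   and arg_idx 1 as "1".  */
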
976 static label_text
977 describe_argument_index (tree fndecl, int arg_idx)
978 {
979 if (method_p (fndecl))
980 if (arg_idx == 0)
981 return label_text::borrow ("'this'");
982 pretty_printer pp;
983 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
984 return label_text::take (xstrdup (pp_formatted_text (&pp)));
985 }
986
987 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
988 Issue a note informing that the pertinent argument must be non-NULL. */
989
990 static void
991 inform_nonnull_attribute (tree fndecl, int arg_idx)
992 {
993 label_text arg_desc = describe_argument_index (fndecl, arg_idx);
994 inform (DECL_SOURCE_LOCATION (fndecl),
995 "argument %s of %qD must be non-null",
996 arg_desc.m_buffer, fndecl);
997 arg_desc.maybe_free ();
998 /* Ideally we would use the location of the parm and underline the
999 attribute also - but we don't have the location_t values at this point
1000 in the middle-end.
1001 For reference, the C and C++ FEs have get_fndecl_argument_location. */
1002 }
1003
1004 /* Concrete subclass for describing passing a possibly-NULL value to a
1005 function marked with __attribute__((nonnull)). */
1006
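/* A hedged sketch of code reaching this diagnostic (the function name
   "consume" is illustrative only):

     extern void consume (void *buf) __attribute__ ((nonnull));
     void *p = malloc (16);
     consume (p);   p is still "unchecked", so this is reported as
                    "use of possibly-NULL 'p' where non-null expected",
                    with a note pointing at the nonnull declaration.  */
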
1007 class possible_null_arg : public possible_null
1008 {
1009 public:
1010 possible_null_arg (const malloc_state_machine &sm, tree arg,
1011 tree fndecl, int arg_idx)
1012 : possible_null (sm, arg),
1013 m_fndecl (fndecl), m_arg_idx (arg_idx)
1014 {}
1015
1016 const char *get_kind () const FINAL OVERRIDE { return "possible_null_arg"; }
1017
1018 bool subclass_equal_p (const pending_diagnostic &base_other) const
1019 {
1020 const possible_null_arg &sub_other
1021 = (const possible_null_arg &)base_other;
1022 return (same_tree_p (m_arg, sub_other.m_arg)
1023 && m_fndecl == sub_other.m_fndecl
1024 && m_arg_idx == sub_other.m_arg_idx);
1025 }
1026
1027 int get_controlling_option () const FINAL OVERRIDE
1028 {
1029 return OPT_Wanalyzer_possible_null_argument;
1030 }
1031
1032 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1033 {
1034 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1035 auto_diagnostic_group d;
1036 diagnostic_metadata m;
1037 m.add_cwe (690);
1038 bool warned
1039 = warning_meta (rich_loc, m, get_controlling_option (),
1040 "use of possibly-NULL %qE where non-null expected",
1041 m_arg);
1042 if (warned)
1043 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1044 return warned;
1045 }
1046
1047 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1048 {
1049 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1050 label_text result;
1051 if (m_origin_of_unchecked_event.known_p ())
1052 result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
1053 " where non-null expected",
1054 arg_desc.m_buffer, ev.m_expr,
1055 &m_origin_of_unchecked_event);
1056 else
1057 result = ev.formatted_print ("argument %s (%qE) could be NULL"
1058 " where non-null expected",
1059 arg_desc.m_buffer, ev.m_expr);
1060 arg_desc.maybe_free ();
1061 return result;
1062 }
1063
1064 private:
1065 tree m_fndecl;
1066 int m_arg_idx;
1067 };
1068
1069 /* Concrete subclass for describing a dereference of a NULL value. */
1070
1071 class null_deref : public malloc_diagnostic
1072 {
1073 public:
1074 null_deref (const malloc_state_machine &sm, tree arg)
1075 : malloc_diagnostic (sm, arg) {}
1076
1077 const char *get_kind () const FINAL OVERRIDE { return "null_deref"; }
1078
1079 int get_controlling_option () const FINAL OVERRIDE
1080 {
1081 return OPT_Wanalyzer_null_dereference;
1082 }
1083
1084 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1085 {
1086 /* CWE-476: NULL Pointer Dereference. */
1087 diagnostic_metadata m;
1088 m.add_cwe (476);
1089 return warning_meta (rich_loc, m, get_controlling_option (),
1090 "dereference of NULL %qE", m_arg);
1091 }
1092
1093 label_text describe_return_of_state (const evdesc::return_of_state &info)
1094 FINAL OVERRIDE
1095 {
1096 if (info.m_state == m_sm.m_null)
1097 return info.formatted_print ("return of NULL to %qE from %qE",
1098 info.m_caller_fndecl, info.m_callee_fndecl);
1099 return label_text ();
1100 }
1101
1102 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1103 {
1104 return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
1105 }
1106 };
1107
1108 /* Concrete subclass for describing passing a NULL value to a
1109 function marked with __attribute__((nonnull)). */
1110
1111 class null_arg : public malloc_diagnostic
1112 {
1113 public:
1114 null_arg (const malloc_state_machine &sm, tree arg,
1115 tree fndecl, int arg_idx)
1116 : malloc_diagnostic (sm, arg),
1117 m_fndecl (fndecl), m_arg_idx (arg_idx)
1118 {}
1119
1120 const char *get_kind () const FINAL OVERRIDE { return "null_arg"; }
1121
1122 bool subclass_equal_p (const pending_diagnostic &base_other) const
1123 {
1124 const null_arg &sub_other
1125 = (const null_arg &)base_other;
1126 return (same_tree_p (m_arg, sub_other.m_arg)
1127 && m_fndecl == sub_other.m_fndecl
1128 && m_arg_idx == sub_other.m_arg_idx);
1129 }
1130
1131 int get_controlling_option () const FINAL OVERRIDE
1132 {
1133 return OPT_Wanalyzer_null_argument;
1134 }
1135
1136 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1137 {
1138 /* CWE-476: NULL Pointer Dereference. */
1139 auto_diagnostic_group d;
1140 diagnostic_metadata m;
1141 m.add_cwe (476);
1142
1143 bool warned;
1144 if (zerop (m_arg))
1145 warned = warning_meta (rich_loc, m, get_controlling_option (),
1146 "use of NULL where non-null expected");
1147 else
1148 warned = warning_meta (rich_loc, m, get_controlling_option (),
1149 "use of NULL %qE where non-null expected",
1150 m_arg);
1151 if (warned)
1152 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1153 return warned;
1154 }
1155
1156 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1157 {
1158 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1159 label_text result;
1160 if (zerop (ev.m_expr))
1161 result = ev.formatted_print ("argument %s NULL where non-null expected",
1162 arg_desc.m_buffer);
1163 else
1164 result = ev.formatted_print ("argument %s (%qE) NULL"
1165 " where non-null expected",
1166 arg_desc.m_buffer, ev.m_expr);
1167 arg_desc.maybe_free ();
1168 return result;
1169 }
1170
1171 private:
1172 tree m_fndecl;
1173 int m_arg_idx;
1174 };
1175
1176 class use_after_free : public malloc_diagnostic
1177 {
1178 public:
1179 use_after_free (const malloc_state_machine &sm, tree arg,
1180 const deallocator *deallocator)
1181 : malloc_diagnostic (sm, arg),
1182 m_deallocator (deallocator)
1183 {
1184 gcc_assert (deallocator);
1185 }
1186
1187 const char *get_kind () const FINAL OVERRIDE { return "use_after_free"; }
1188
1189 int get_controlling_option () const FINAL OVERRIDE
1190 {
1191 return OPT_Wanalyzer_use_after_free;
1192 }
1193
1194 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1195 {
1196 /* CWE-416: Use After Free. */
1197 diagnostic_metadata m;
1198 m.add_cwe (416);
1199 return warning_meta (rich_loc, m, get_controlling_option (),
1200 "use after %<%s%> of %qE",
1201 m_deallocator->m_name, m_arg);
1202 }
1203
1204 label_text describe_state_change (const evdesc::state_change &change)
1205 FINAL OVERRIDE
1206 {
1207 if (freed_p (change.m_new_state))
1208 {
1209 m_free_event = change.m_event_id;
1210 switch (m_deallocator->m_wording)
1211 {
1212 default:
1213 case WORDING_REALLOCATED:
1214 gcc_unreachable ();
1215 case WORDING_FREED:
1216 return label_text::borrow ("freed here");
1217 case WORDING_DELETED:
1218 return label_text::borrow ("deleted here");
1219 case WORDING_DEALLOCATED:
1220 return label_text::borrow ("deallocated here");
1221 }
1222 }
1223 return malloc_diagnostic::describe_state_change (change);
1224 }
1225
1226 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1227 {
1228 const char *funcname = m_deallocator->m_name;
1229 if (m_free_event.known_p ())
1230 switch (m_deallocator->m_wording)
1231 {
1232 default:
1233 case WORDING_REALLOCATED:
1234 gcc_unreachable ();
1235 case WORDING_FREED:
1236 return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
1237 funcname, ev.m_expr, &m_free_event);
1238 case WORDING_DELETED:
1239 return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
1240 funcname, ev.m_expr, &m_free_event);
1241 case WORDING_DEALLOCATED:
1242 return ev.formatted_print ("use after %<%s%> of %qE;"
1243 " deallocated at %@",
1244 funcname, ev.m_expr, &m_free_event);
1245 }
1246 else
1247 return ev.formatted_print ("use after %<%s%> of %qE",
1248 funcname, ev.m_expr);
1249 }
1250
1251 /* Implementation of pending_diagnostic::supercedes_p for
1252 use_after_free.
1253
1254      We want use-after-free to supercede use-of-uninitialized-value,
1255 so that if we have these at the same stmt, we don't emit
1256 a use-of-uninitialized, just the use-after-free.
1257 (this is because we fully purge information about freed
1258 buffers when we free them to avoid state explosions, so
1259 that if they are accessed after the free, it looks like
1260 they are uninitialized). */
1261
1262 bool supercedes_p (const pending_diagnostic &other) const FINAL OVERRIDE
1263 {
1264 if (other.use_of_uninit_p ())
1265 return true;
1266
1267 return false;
1268 }
1269
1270 private:
1271 diagnostic_event_id_t m_free_event;
1272 const deallocator *m_deallocator;
1273 };
1274
1275 class malloc_leak : public malloc_diagnostic
1276 {
1277 public:
1278 malloc_leak (const malloc_state_machine &sm, tree arg)
1279 : malloc_diagnostic (sm, arg) {}
1280
1281 const char *get_kind () const FINAL OVERRIDE { return "malloc_leak"; }
1282
1283 int get_controlling_option () const FINAL OVERRIDE
1284 {
1285 return OPT_Wanalyzer_malloc_leak;
1286 }
1287
1288 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1289 {
1290 diagnostic_metadata m;
1291 m.add_cwe (401);
1292 if (m_arg)
1293 return warning_meta (rich_loc, m, get_controlling_option (),
1294 "leak of %qE", m_arg);
1295 else
1296 return warning_meta (rich_loc, m, get_controlling_option (),
1297 "leak of %qs", "<unknown>");
1298 }
1299
1300 label_text describe_state_change (const evdesc::state_change &change)
1301 FINAL OVERRIDE
1302 {
1303 if (unchecked_p (change.m_new_state)
1304 || (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
1305 {
1306 m_alloc_event = change.m_event_id;
1307 return label_text::borrow ("allocated here");
1308 }
1309 return malloc_diagnostic::describe_state_change (change);
1310 }
1311
1312 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1313 {
1314 if (ev.m_expr)
1315 {
1316 if (m_alloc_event.known_p ())
1317 return ev.formatted_print ("%qE leaks here; was allocated at %@",
1318 ev.m_expr, &m_alloc_event);
1319 else
1320 return ev.formatted_print ("%qE leaks here", ev.m_expr);
1321 }
1322 else
1323 {
1324 if (m_alloc_event.known_p ())
1325 return ev.formatted_print ("%qs leaks here; was allocated at %@",
1326 "<unknown>", &m_alloc_event);
1327 else
1328 return ev.formatted_print ("%qs leaks here", "<unknown>");
1329 }
1330 }
1331
1332 private:
1333 diagnostic_event_id_t m_alloc_event;
1334 };
1335
1336 class free_of_non_heap : public malloc_diagnostic
1337 {
1338 public:
1339 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1340 const region *freed_reg,
1341 const char *funcname)
1342 : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
1343 {
1344 }
1345
1346 const char *get_kind () const FINAL OVERRIDE { return "free_of_non_heap"; }
1347
1348 bool subclass_equal_p (const pending_diagnostic &base_other) const
1349 FINAL OVERRIDE
1350 {
1351 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1352 return (same_tree_p (m_arg, other.m_arg)
1353 && m_freed_reg == other.m_freed_reg);
1354 }
1355
1356 int get_controlling_option () const FINAL OVERRIDE
1357 {
1358 return OPT_Wanalyzer_free_of_non_heap;
1359 }
1360
1361 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1362 {
1363 auto_diagnostic_group d;
1364 diagnostic_metadata m;
1365 m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1366 switch (get_memory_space ())
1367 {
1368 default:
1369 case MEMSPACE_HEAP:
1370 gcc_unreachable ();
1371 case MEMSPACE_UNKNOWN:
1372 case MEMSPACE_CODE:
1373 case MEMSPACE_GLOBALS:
1374 case MEMSPACE_READONLY_DATA:
1375 return warning_meta (rich_loc, m, get_controlling_option (),
1376 "%<%s%> of %qE which points to memory"
1377 " not on the heap",
1378 m_funcname, m_arg);
1379 break;
1380 case MEMSPACE_STACK:
1381 return warning_meta (rich_loc, m, get_controlling_option (),
1382 "%<%s%> of %qE which points to memory"
1383 " on the stack",
1384 m_funcname, m_arg);
1385 break;
1386 }
1387 }
1388
1389 label_text describe_state_change (const evdesc::state_change &)
1390 FINAL OVERRIDE
1391 {
1392 return label_text::borrow ("pointer is from here");
1393 }
1394
1395 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1396 {
1397 return ev.formatted_print ("call to %qs here", m_funcname);
1398 }
1399
1400 void mark_interesting_stuff (interesting_t *interest) FINAL OVERRIDE
1401 {
1402 if (m_freed_reg)
1403 interest->add_region_creation (m_freed_reg);
1404 }
1405
1406 private:
1407 enum memory_space get_memory_space () const
1408 {
1409 if (m_freed_reg)
1410 return m_freed_reg->get_memory_space ();
1411 else
1412 return MEMSPACE_UNKNOWN;
1413 }
1414
1415 const region *m_freed_reg;
1416 const char *m_funcname;
1417 };
1418
1419 /* struct allocation_state : public state_machine::state. */
1420
1421 /* Implementation of state_machine::state::dump_to_pp vfunc
1422 for allocation_state: append the API that this allocation is
1423 associated with. */
1424
1425 void
1426 allocation_state::dump_to_pp (pretty_printer *pp) const
1427 {
1428 state_machine::state::dump_to_pp (pp);
1429 if (m_deallocators)
1430 {
1431 pp_string (pp, " (");
1432 m_deallocators->dump_to_pp (pp);
1433 pp_character (pp, ')');
1434 }
1435 }
1436
1437 /* Given an allocation_state for a deallocator_set, get the "nonnull" state
1438 for the corresponding allocator(s). */
1439
1440 const allocation_state *
1441 allocation_state::get_nonnull () const
1442 {
1443 gcc_assert (m_deallocators);
1444 return as_a_allocation_state (m_deallocators->m_nonnull);
1445 }
1446
1447 /* malloc_state_machine's ctor. */
1448
1449 malloc_state_machine::malloc_state_machine (logger *logger)
1450 : state_machine ("malloc", logger),
1451 m_free (this, "free", WORDING_FREED),
1452 m_scalar_delete (this, "delete", WORDING_DELETED),
1453 m_vector_delete (this, "delete[]", WORDING_DELETED),
1454 m_realloc (this, "realloc", WORDING_REALLOCATED)
1455 {
1456 gcc_assert (m_start->get_id () == 0);
1457 m_null = add_state ("null", RS_FREED, NULL, NULL);
1458 m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
1459 m_stop = add_state ("stop", RS_STOP, NULL, NULL);
1460 }
1461
1462 malloc_state_machine::~malloc_state_machine ()
1463 {
1464 unsigned i;
1465 custom_deallocator_set *set;
1466 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1467 delete set;
1468 custom_deallocator *d;
1469 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1470 delete d;
1471 }
1472
1473 state_machine::state_t
1474 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1475 const deallocator_set *deallocators,
1476 const deallocator *deallocator)
1477 {
1478 return add_custom_state (new allocation_state (name, alloc_state_id (),
1479 rs, deallocators,
1480 deallocator));
1481 }
1482
1483 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1484 return a custom_deallocator_set for them, consolidating them
1485 to ensure uniqueness of the sets.
1486
1487 Return NULL if it has no such attributes. */
1488
1489 const custom_deallocator_set *
1490 malloc_state_machine::
1491 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1492 {
1493 /* Early rejection of decls without attributes. */
1494 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1495 if (!attrs)
1496 return NULL;
1497
1498 /* Otherwise, call maybe_create_custom_deallocator_set,
1499 memoizing the result. */
1500 if (custom_deallocator_set **slot
1501 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1502 return *slot;
1503 custom_deallocator_set *set
1504 = maybe_create_custom_deallocator_set (allocator_fndecl);
1505 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1506 return set;
1507 }
1508
1509 /* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1510 look for any "__attribute__((malloc(FOO)))" and return a
1511 custom_deallocator_set for them, consolidating them
1512 to ensure uniqueness of the sets.
1513
1514 Return NULL if it has no such attributes.
1515
1516 Subroutine of get_or_create_custom_deallocator_set which
1517 memoizes the result. */
1518
1519 custom_deallocator_set *
1520 malloc_state_machine::
1521 maybe_create_custom_deallocator_set (tree allocator_fndecl)
1522 {
1523 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1524 gcc_assert (attrs);
1525
1526 /* Look for instances of __attribute__((malloc(FOO))). */
1527 auto_vec<const deallocator *> deallocator_vec;
1528 for (tree allocs = attrs;
1529 (allocs = lookup_attribute ("malloc", allocs));
1530 allocs = TREE_CHAIN (allocs))
1531 {
1532 tree args = TREE_VALUE (allocs);
1533 if (!args)
1534 continue;
1535 if (TREE_VALUE (args))
1536 {
1537 const deallocator *d
1538 = get_or_create_deallocator (TREE_VALUE (args));
1539 deallocator_vec.safe_push (d);
1540 }
1541 }
1542
1543 /* If there weren't any deallocators, bail. */
1544 if (deallocator_vec.length () == 0)
1545 return NULL;
1546
1547 /* Consolidate, so that we reuse existing deallocator_set
1548 instances. */
1549 deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
1550 custom_deallocator_set **slot
1551 = m_custom_deallocator_set_map.get (&deallocator_vec);
1552 if (slot)
1553 return *slot;
1554 custom_deallocator_set *set
1555 = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
1556 m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
1557 m_dynamic_sets.safe_push (set);
1558 return set;
1559 }
1560
1561 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1562
1563 const deallocator *
1564 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1565 {
1566 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1567 if (slot)
1568 return *slot;
1569
1570 /* Reuse "free". */
1571 deallocator *d;
1572 if (is_named_call_p (deallocator_fndecl, "free")
1573 || is_std_named_call_p (deallocator_fndecl, "free")
1574 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1575 d = &m_free.m_deallocator;
1576 else
1577 {
1578 custom_deallocator *cd
1579 = new custom_deallocator (this, deallocator_fndecl,
1580 WORDING_DEALLOCATED);
1581 m_dynamic_deallocators.safe_push (cd);
1582 d = cd;
1583 }
1584 m_deallocator_map.put (deallocator_fndecl, d);
1585 return d;
1586 }
1587
1588 /* Try to identify the function declaration either by name or as a known malloc
1589 builtin. */
1590
1591 static bool
1592 known_allocator_p (const_tree fndecl, const gcall *call)
1593 {
1594 /* Either it is a function we know by name and number of arguments... */
1595 if (is_named_call_p (fndecl, "malloc", call, 1)
1596 || is_named_call_p (fndecl, "calloc", call, 2)
1597 || is_std_named_call_p (fndecl, "malloc", call, 1)
1598 || is_std_named_call_p (fndecl, "calloc", call, 2)
1599 || is_named_call_p (fndecl, "strdup", call, 1)
1600 || is_named_call_p (fndecl, "strndup", call, 2))
1601 return true;
1602
1603 /* ... or it is a builtin allocator that allocates objects freed with
1604 __builtin_free. */
1605 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1606 switch (DECL_FUNCTION_CODE (fndecl))
1607 {
1608 case BUILT_IN_MALLOC:
1609 case BUILT_IN_CALLOC:
1610 case BUILT_IN_STRDUP:
1611 case BUILT_IN_STRNDUP:
1612 return true;
1613 default:
1614 break;
1615 }
1616
1617 return false;
1618 }
1619
1620 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1621
1622 bool
1623 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1624 const supernode *node,
1625 const gimple *stmt) const
1626 {
1627 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1628 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1629 {
1630 if (known_allocator_p (callee_fndecl, call))
1631 {
1632 on_allocator_call (sm_ctxt, call, &m_free);
1633 return true;
1634 }
1635
1636 if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1637 on_allocator_call (sm_ctxt, call, &m_scalar_delete);
1638 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1639 on_allocator_call (sm_ctxt, call, &m_vector_delete);
1640 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1641 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1642 {
1643 on_deallocator_call (sm_ctxt, node, call,
1644 &m_scalar_delete.m_deallocator, 0);
1645 return true;
1646 }
1647 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1648 {
1649 on_deallocator_call (sm_ctxt, node, call,
1650 &m_vector_delete.m_deallocator, 0);
1651 return true;
1652 }
1653
1654 if (is_named_call_p (callee_fndecl, "alloca", call, 1)
1655 || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
1656 {
1657 tree lhs = gimple_call_lhs (call);
1658 if (lhs)
1659 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1660 return true;
1661 }
1662
1663 if (is_named_call_p (callee_fndecl, "free", call, 1)
1664 || is_std_named_call_p (callee_fndecl, "free", call, 1)
1665 || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
1666 {
1667 on_deallocator_call (sm_ctxt, node, call,
1668 &m_free.m_deallocator, 0);
1669 return true;
1670 }
1671
1672 if (is_named_call_p (callee_fndecl, "realloc", call, 2)
1673 || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
1674 {
1675 on_realloc_call (sm_ctxt, node, call);
1676 return true;
1677 }
1678
1679 if (unaffected_by_call_p (callee_fndecl))
1680 return true;
1681
1682 /* Cast away const-ness for cache-like operations. */
1683 malloc_state_machine *mutable_this
1684 = const_cast <malloc_state_machine *> (this);
1685
1686 /* Handle "__attribute__((malloc(FOO)))". */
1687 if (const deallocator_set *deallocators
1688 = mutable_this->get_or_create_custom_deallocator_set
1689 (callee_fndecl))
1690 {
1691 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
1692 bool returns_nonnull
1693 = lookup_attribute ("returns_nonnull", attrs);
1694 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
1695 }
1696
1697 /* Handle "__attribute__((nonnull))". */
1698 {
1699 tree fntype = TREE_TYPE (callee_fndecl);
1700 bitmap nonnull_args = get_nonnull_args (fntype);
1701 if (nonnull_args)
1702 {
1703 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1704 {
1705 tree arg = gimple_call_arg (stmt, i);
1706 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
1707 continue;
1708 /* A "nonnull" attribute without arguments (an empty bitmap here) applies
1709    to all pointer arguments; otherwise only the listed ones must be non-NULL. */
1710 if (bitmap_empty_p (nonnull_args)
1711 || bitmap_bit_p (nonnull_args, i))
1712 {
1713 state_t state = sm_ctxt->get_state (stmt, arg);
1714 /* Can't use a switch as the states are non-const. */
1715 if (unchecked_p (state))
1716 {
1717 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1718 sm_ctxt->warn (node, stmt, arg,
1719 new possible_null_arg (*this, diag_arg,
1720 callee_fndecl,
1721 i));
1722 const allocation_state *astate
1723 = as_a_allocation_state (state);
1724 sm_ctxt->set_next_state (stmt, arg,
1725 astate->get_nonnull ());
1726 }
1727 else if (state == m_null)
1728 {
1729 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1730 sm_ctxt->warn (node, stmt, arg,
1731 new null_arg (*this, diag_arg,
1732 callee_fndecl, i));
1733 sm_ctxt->set_next_state (stmt, arg, m_stop);
1734 }
1735 }
1736 }
1737 BITMAP_FREE (nonnull_args);
1738 }
1739 }
1740
1741 /* Check for this after nonnull, so that if we have both
1742 then we transition to "freed", rather than "checked". */
1743 unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
1744 if (dealloc_argno != UINT_MAX)
1745 {
1746 const deallocator *d
1747 = mutable_this->get_or_create_deallocator (callee_fndecl);
1748 on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
1749 }
1750 }
1751
1752 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
1753 if (any_pointer_p (lhs))
1754 on_zero_assignment (sm_ctxt, stmt, lhs);
1755
1756 /* Handle dereferences. */
1757 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
1758 {
1759 tree op = gimple_op (stmt, i);
1760 if (!op)
1761 continue;
1762 if (TREE_CODE (op) == COMPONENT_REF)
1763 op = TREE_OPERAND (op, 0);
1764
1765 if (TREE_CODE (op) == MEM_REF)
1766 {
1767 tree arg = TREE_OPERAND (op, 0);
1768
1769 state_t state = sm_ctxt->get_state (stmt, arg);
1770 if (unchecked_p (state))
1771 {
1772 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1773 sm_ctxt->warn (node, stmt, arg,
1774 new possible_null_deref (*this, diag_arg));
1775 const allocation_state *astate = as_a_allocation_state (state);
1776 sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
1777 }
1778 else if (state == m_null)
1779 {
1780 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1781 sm_ctxt->warn (node, stmt, arg,
1782 new null_deref (*this, diag_arg));
1783 sm_ctxt->set_next_state (stmt, arg, m_stop);
1784 }
1785 else if (freed_p (state))
1786 {
1787 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1788 const allocation_state *astate = as_a_allocation_state (state);
1789 sm_ctxt->warn (node, stmt, arg,
1790 new use_after_free (*this, diag_arg,
1791 astate->m_deallocator));
1792 sm_ctxt->set_next_state (stmt, arg, m_stop);
1793 }
1794 }
1795 }
1796 return false;
1797 }
1798
1799 /* Handle a call to an allocator.
1800 RETURNS_NONNULL is true if CALL is to a fndecl known to have
1801 __attribute__((returns_nonnull)). */
1802
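/* For example (a sketch; the name xmalloc_like is hypothetical):

     __attribute__ ((malloc (free), returns_nonnull))
     void *xmalloc_like (size_t sz);

   The result of such a call starts in the "nonnull" state rather than
   "unchecked", so no possibly-NULL diagnostics are issued for it.  */
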
1803 void
1804 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
1805 const gcall *call,
1806 const deallocator_set *deallocators,
1807 bool returns_nonnull) const
1808 {
1809 tree lhs = gimple_call_lhs (call);
1810 if (lhs)
1811 {
1812 if (sm_ctxt->get_state (call, lhs) == m_start)
1813 sm_ctxt->set_next_state (call, lhs,
1814 (returns_nonnull
1815 ? deallocators->m_nonnull
1816 : deallocators->m_unchecked));
1817 }
1818 else
1819 {
1820 /* TODO: report leak. */
1821 }
1822 }
1823
1824 /* Handle deallocations of non-heap pointers.
1825 non-heap -> stop, with warning. */
1826
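/* A minimal sketch of what this catches (example only):

     char buf[16];
     free (buf);    buf points to the stack, so this is reported as
                    -Wanalyzer-free-of-non-heap (CWE-590).  */
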
1827 void
1828 malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
1829 const supernode *node,
1830 const gcall *call,
1831 tree arg,
1832 const deallocator *d) const
1833 {
1834 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1835 const region *freed_reg = NULL;
1836 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
1837 {
1838 const region_model *old_model = old_state->m_region_model;
1839 const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
1840 freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
1841 }
1842 sm_ctxt->warn (node, call, arg,
1843 new free_of_non_heap (*this, diag_arg, freed_reg,
1844 d->m_name));
1845 sm_ctxt->set_next_state (call, arg, m_stop);
1846 }
1847
1848 void
1849 malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
1850 const supernode *node,
1851 const gcall *call,
1852 const deallocator *d,
1853 unsigned argno) const
1854 {
1855 if (argno >= gimple_call_num_args (call))
1856 return;
1857 tree arg = gimple_call_arg (call, argno);
1858
1859 state_t state = sm_ctxt->get_state (call, arg);
1860
1861 /* start/unchecked/nonnull -> freed. */
1862 if (state == m_start)
1863 sm_ctxt->set_next_state (call, arg, d->m_freed);
1864 else if (unchecked_p (state) || nonnull_p (state))
1865 {
1866 const allocation_state *astate = as_a_allocation_state (state);
1867 gcc_assert (astate->m_deallocators);
1868 if (!astate->m_deallocators->contains_p (d))
1869 {
1870 /* Wrong allocator. */
1871 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1872 pending_diagnostic *pd
1873 = new mismatching_deallocation (*this, diag_arg,
1874 astate->m_deallocators,
1875 d);
1876 sm_ctxt->warn (node, call, arg, pd);
1877 }
1878 sm_ctxt->set_next_state (call, arg, d->m_freed);
1879 }
1880
1881 /* Keep state "null" as-is, rather than transitioning to "freed";
1882 we don't want to complain about double-free of NULL. */
1883 else if (state == d->m_freed)
1884 {
1885 /* freed -> stop, with warning. */
1886 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1887 sm_ctxt->warn (node, call, arg,
1888 new double_free (*this, diag_arg, d->m_name));
1889 sm_ctxt->set_next_state (call, arg, m_stop);
1890 }
1891 else if (state == m_non_heap)
1892 {
1893 /* non-heap -> stop, with warning. */
1894 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
1895 }
1896 }
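/* Sketch of the deallocator-call transitions (illustrative example;
   'custom_alloc' and 'custom_free' are hypothetical, not part of GCC):

     #include <stdlib.h>

     extern void custom_free (void *);
     extern void *custom_alloc (int)
       __attribute__ ((malloc (custom_free)));

     void deallocator_example (void)
     {
       void *p = custom_alloc (16);
       free (p);      // 'p' expects 'custom_free': mismatching deallocation

       void *q = malloc (16);
       free (q);      // 'q' -> "freed"
       free (q);      // double-free; 'q' -> "stop"

       void *r = NULL;
       free (r);      // 'r' stays in the "null" state: no warning
     }
*/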
1897
1898 /* Handle a call to "realloc".
1899 Check for free of non-heap or mismatching allocators,
1900 transitioning to the "stop" state for such cases.
1901
1902 Otherwise, region_model::impl_call_realloc will later
1903 get called (which will handle other sm-state transitions
1904 when the state is bifurcated). */
1905
1906 void
1907 malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
1908 const supernode *node,
1909 const gcall *call) const
1910 {
1911 const unsigned argno = 0;
1912 const deallocator *d = &m_realloc;
1913
1914 tree arg = gimple_call_arg (call, argno);
1915
1916 state_t state = sm_ctxt->get_state (call, arg);
1917
1918 if (unchecked_p (state) || nonnull_p (state))
1919 {
1920 const allocation_state *astate = as_a_allocation_state (state);
1921 gcc_assert (astate->m_deallocators);
1922 if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
1923 {
1924 /* Wrong allocator. */
1925 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1926 pending_diagnostic *pd
1927 = new mismatching_deallocation (*this, diag_arg,
1928 astate->m_deallocators,
1929 d);
1930 sm_ctxt->warn (node, call, arg, pd);
1931 sm_ctxt->set_next_state (call, arg, m_stop);
1932 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1933 path_ctxt->terminate_path ();
1934 }
1935 }
1936 else if (state == m_free.m_deallocator.m_freed)
1937 {
1938 /* freed -> stop, with warning. */
1939 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1940 sm_ctxt->warn (node, call, arg,
1941 new double_free (*this, diag_arg, "free"));
1942 sm_ctxt->set_next_state (call, arg, m_stop);
1943 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1944 path_ctxt->terminate_path ();
1945 }
1946 else if (state == m_non_heap)
1947 {
1948 /* non-heap -> stop, with warning. */
1949 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
1950 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1951 path_ctxt->terminate_path ();
1952 }
1953 }
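/* Sketch of the cases rejected here (illustrative example only):

     #include <stdlib.h>

     void realloc_example (void)
     {
       char buf[16];
       void *p = realloc (buf, 32);   // realloc of a non-heap buffer

       void *q = malloc (16);
       free (q);
       void *r = realloc (q, 32);     // 'q' is already freed: double-"free"
     }

   In both cases the path is terminated rather than being bifurcated
   into realloc's usual success/failure outcomes.  */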
1954
1955 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
1956
1957 void
1958 malloc_state_machine::on_phi (sm_context *sm_ctxt,
1959 const supernode *node ATTRIBUTE_UNUSED,
1960 const gphi *phi,
1961 tree rhs) const
1962 {
1963 if (zerop (rhs))
1964 {
1965 tree lhs = gimple_phi_result (phi);
1966 on_zero_assignment (sm_ctxt, phi, lhs);
1967 }
1968 }
1969
1970 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
1971 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
1972
1973 void
1974 malloc_state_machine::on_condition (sm_context *sm_ctxt,
1975 const supernode *node ATTRIBUTE_UNUSED,
1976 const gimple *stmt,
1977 const svalue *lhs,
1978 enum tree_code op,
1979 const svalue *rhs) const
1980 {
1981 if (!rhs->all_zeroes_p ())
1982 return;
1983
1984 if (!any_pointer_p (lhs))
1985 return;
1986 if (!any_pointer_p (rhs))
1987 return;
1988
1989 if (op == NE_EXPR)
1990 {
1991 log ("got 'ARG != 0' match");
1992 state_t s = sm_ctxt->get_state (stmt, lhs);
1993 if (unchecked_p (s))
1994 {
1995 const allocation_state *astate = as_a_allocation_state (s);
1996 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
1997 }
1998 }
1999 else if (op == EQ_EXPR)
2000 {
2001 log ("got 'ARG == 0' match");
2002 state_t s = sm_ctxt->get_state (stmt, lhs);
2003 if (unchecked_p (s))
2004 sm_ctxt->set_next_state (stmt, lhs, m_null);
2005 }
2006 }
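/* Sketch of the condition handling (illustrative example only):

     #include <stdlib.h>

     void condition_example (void)
     {
       char *p = malloc (16);   // 'p' -> "unchecked"
       if (p == NULL)           // "p == 0": 'p' -> "null" on the taken edge
         return;                // and -> "nonnull" on the fall-through edge
       p[0] = 'x';              // no possible-NULL-dereference report here
       free (p);
     }
*/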
2007
2008 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
2009 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
2010 (to avoid false leak reports). */
2011
2012 bool
2013 malloc_state_machine::can_purge_p (state_t s) const
2014 {
2015 enum resource_state rs = get_rs (s);
2016 return rs != RS_UNCHECKED && rs != RS_NONNULL;
2017 }
2018
2019 /* Implementation of state_machine::on_leak vfunc for malloc_state_machine
2020 (for complaining about leaks of pointers in state 'unchecked' and
2021 'nonnull'). */
2022
2023 pending_diagnostic *
2024 malloc_state_machine::on_leak (tree var) const
2025 {
2026 return new malloc_leak (*this, var);
2027 }
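/* Sketch of a leak that the two hooks above combine to report
   (illustrative example only):

     #include <stdlib.h>

     void leak_example (void)
     {
       void *p = malloc (16);
       if (!p)
         return;
     }   // 'p' is still "nonnull" here and was never freed: leak reported

   can_purge_p refuses to purge "unchecked"/"nonnull" values, so the
   state survives until the value becomes unreachable, at which point
   on_leak supplies the pending_diagnostic.  */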
2028
2029 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
2030 for malloc_state_machine. */
2031
2032 bool
2033 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
2034 bool is_mutable) const
2035 {
2036 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
2037 unknown fn. */
2038 if (s == m_non_heap)
2039 return false;
2040
2041 /* Otherwise, pointers passed as non-const can be freed. */
2042 return is_mutable;
2043 }
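/* Sketch of the effect of this hook (illustrative example;
   'opaque_fn' is hypothetical and has no body visible to the analyzer):

     #include <stdlib.h>

     extern void opaque_fn (void *);

     void unknown_call_example (void)
     {
       void *p = malloc (16);
       opaque_fn (p);   // passed as mutable: 'p' might have been freed or
                        // stored by the callee, so its sm-state is reset
       free (p);        // hence no double-free or leak guesswork here

       int x;
       opaque_fn (&x);  // '&x' keeps its "non-heap" state across the call
     }
*/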
2044
2045 /* Return true if calls to FNDECL are known to not affect this sm-state. */
2046
2047 bool
2048 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2049 {
2050 /* A set of functions that are known to not affect allocation
2051 status, even if we haven't fully modelled the rest of their
2052 behavior yet. */
2053 static const char * const funcnames[] = {
2054 /* This array must be kept sorted. */
2055 "strsep",
2056 };
2057 const size_t count = ARRAY_SIZE (funcnames);
2058 function_set fs (funcnames, count);
2059
2060 if (fs.contains_decl_p (fndecl))
2061 return true;
2062
2063 return false;
2064 }
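/* Sketch of the effect (illustrative example only):

     #include <stdlib.h>
     #include <string.h>

     void strsep_example (void)
     {
       char *buf = strdup ("a,b,c");   // 'buf' expects 'free'
       char *iter = buf;
       while (strsep (&iter, ","))     // listed above, so these calls do
         ;                             // not disturb the sm-state
       free (buf);
     }

   Treating "strsep" as an unknown function would conservatively reset
   the allocation state of anything reachable through its arguments,
   making the tracking of 'buf' less precise.  */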
2065
2066 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2067 assign zero to LHS. */
2068
2069 void
2070 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2071 const gimple *stmt,
2072 tree lhs) const
2073 {
2074 state_t s = sm_ctxt->get_state (stmt, lhs);
2075 enum resource_state rs = get_rs (s);
2076 if (rs == RS_START
2077 || rs == RS_UNCHECKED
2078 || rs == RS_NONNULL
2079 || rs == RS_FREED)
2080 sm_ctxt->set_next_state (stmt, lhs, m_null);
2081 }
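/* Sketch of the zero-assignment transitions (illustrative example only):

     #include <stdlib.h>

     void zero_example (void)
     {
       void *p = malloc (16);
       free (p);     // 'p' -> "freed"
       p = NULL;     // freed -> "null"
       free (p);     // freeing a known-NULL pointer: no double-free report
     }

   This is what avoids a false double-free report for the common idiom
   of NULLing a pointer immediately after freeing it.  */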
2082
2083 /* Special-case hook for handling realloc, for the "success with move to
2084 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2085 non-null.
2086
2087 This is similar to on_deallocator_call and on_allocator_call,
2088 but the checks happen in on_realloc_call, and this hook runs once the state has been split.  */
2089
2090 void
2091 malloc_state_machine::
2092 on_realloc_with_move (region_model *model,
2093 sm_state_map *smap,
2094 const svalue *old_ptr_sval,
2095 const svalue *new_ptr_sval,
2096 const extrinsic_state &ext_state) const
2097 {
2098 smap->set_state (model, old_ptr_sval,
2099 m_free.m_deallocator.m_freed,
2100 NULL, ext_state);
2101
2102 smap->set_state (model, new_ptr_sval,
2103 m_free.m_nonnull,
2104 NULL, ext_state);
2105 }
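/* Sketch of the "success with move" outcome modelled here
   (illustrative example only):

     #include <stdlib.h>

     void realloc_move_example (void)
     {
       char *p = malloc (16);
       char *q = realloc (p, 4096);
       if (q)
         free (q);   // moved path: 'p' is "freed", 'q' is "nonnull"
       else
         free (p);   // failure path: 'p' keeps its original state
     }

   The states are written directly into SMAP, since this runs from
   region_model::impl_call_realloc rather than via an sm_context.  */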
2106
2107 } // anonymous namespace
2108
2109 /* Internal interface to this file. */
2110
2111 state_machine *
2112 make_malloc_state_machine (logger *logger)
2113 {
2114 return new malloc_state_machine (logger);
2115 }
2116
2117 /* Special-case hook for handling realloc, for use by
2118 region_model::impl_call_realloc::success_with_move::update_model. */
2119
2120 void
2121 region_model::on_realloc_with_move (const call_details &cd,
2122 const svalue *old_ptr_sval,
2123 const svalue *new_ptr_sval)
2124 {
2125 region_model_context *ctxt = cd.get_ctxt ();
2126 if (!ctxt)
2127 return;
2128 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2129 if (!ext_state)
2130 return;
2131
2132 sm_state_map *smap;
2133 const state_machine *sm;
2134 unsigned sm_idx;
2135 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2136 return;
2137
2138 gcc_assert (smap);
2139 gcc_assert (sm);
2140
2141 const malloc_state_machine &malloc_sm
2142 = (const malloc_state_machine &)*sm;
2143
2144 malloc_sm.on_realloc_with_move (this,
2145 smap,
2146 old_ptr_sval,
2147 new_ptr_sval,
2148 *ext_state);
2149 }
2150
2151 } // namespace ana
2152
2153 #endif /* #if ENABLE_ANALYZER */