1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "options.h"
29 #include "bitmap.h"
30 #include "diagnostic-path.h"
31 #include "diagnostic-metadata.h"
32 #include "function.h"
33 #include "json.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "tristate.h"
40 #include "selftest.h"
41 #include "analyzer/call-string.h"
42 #include "analyzer/program-point.h"
43 #include "analyzer/store.h"
44 #include "analyzer/region-model.h"
45 #include "stringpool.h"
46 #include "attribs.h"
47 #include "analyzer/function-set.h"
48 #include "analyzer/program-state.h"
49
50 #if ENABLE_ANALYZER
51
52 namespace ana {
53
54 namespace {
55
56 /* This state machine and its various support classes track allocations
57 and deallocations.
58
59 It has a few standard allocation/deallocation pairs (e.g. new/delete),
60 and also supports user-defined ones via
61 __attribute__ ((malloc(DEALLOCATOR))).
62
63 There can be more than one valid deallocator for a given allocator,
64 for example:
65 __attribute__ ((malloc (fclose)))
66 __attribute__ ((malloc (freopen, 3)))
67 FILE* fopen (const char*, const char*);
68 A deallocator_set represents a particular set of valid deallocators.
69
70 We track the expected deallocator_set for a value, but not the allocation
71 function - there could be more than one allocator per deallocator_set.
72 For example, there could be dozens of allocators for "free" beyond just
73 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
74 of states by tracking individual allocators in the exploded graph;
75 we merely want to track "this value expects to have 'free' called on it".
76 Perhaps we can reconstruct which allocator was used later, when emitting
77 the path, if it's necessary for precision of wording of diagnostics. */
78
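/* For instance, a user-defined allocator/deallocator pair might be
   declared as follows (an illustrative sketch; "my_alloc" and "my_free"
   are hypothetical names, not part of this file):

     void my_free (void *p);
     __attribute__ ((malloc (my_free)))
     void *my_alloc (size_t sz);

   The analyzer would then expect the result of my_alloc to be passed to
   my_free; passing it to any other deallocator is reported as a
   mismatching deallocation (see class mismatching_deallocation below),
   and never deallocating it is reported as a leak.  */
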
79 class deallocator;
80 class deallocator_set;
81 class malloc_state_machine;
82
83 /* An enum for discriminating between different kinds of allocation_state. */
84
85 enum resource_state
86 {
87 /* States that are independent of allocator/deallocator. */
88
89 /* The start state. */
90 RS_START,
91
92 /* State for a pointer that's known to be NULL. */
93 RS_NULL,
94
95 /* State for a pointer that's known to not be on the heap (e.g. to a local
96 or global). */
97 RS_NON_HEAP,
98
99 /* Stop state, for pointers we don't want to track any more. */
100 RS_STOP,
101
102 /* States that relate to a specific deallocator_set. */
103
104 /* State for a pointer returned from an allocator that hasn't
105 been checked for NULL.
106 It could be a pointer to heap-allocated memory, or could be NULL. */
107 RS_UNCHECKED,
108
109 /* State for a pointer returned from an allocator,
110 known to be non-NULL. */
111 RS_NONNULL,
112
113 /* State for a pointer passed to a deallocator. */
114 RS_FREED
115 };
116
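/* As an illustrative sketch, a typical sequence of states for code
   using malloc/free would be:

     void *p = malloc (n);    start     -> unchecked  (may be NULL)
     if (p == NULL)
       return;                unchecked -> nonnull    (on the surviving path)
     ...
     free (p);                nonnull   -> freed
     free (p);                a second "free" is reported as a double-free
                              and the value transitions to the stop state.  */
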
117 /* Custom state subclass, which can optionally refer to a
118 deallocator_set. */
119
120 struct allocation_state : public state_machine::state
121 {
122 allocation_state (const char *name, unsigned id,
123 enum resource_state rs,
124 const deallocator_set *deallocators,
125 const deallocator *deallocator)
126 : state (name, id), m_rs (rs),
127 m_deallocators (deallocators),
128 m_deallocator (deallocator)
129 {}
130
131 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
132
133 const allocation_state *get_nonnull () const;
134
135 enum resource_state m_rs;
136 const deallocator_set *m_deallocators;
137 const deallocator *m_deallocator;
138 };
139
140 /* An enum for choosing which wording to use in various diagnostics
141 when describing deallocations. */
142
143 enum wording
144 {
145 WORDING_FREED,
146 WORDING_DELETED,
147 WORDING_DEALLOCATED,
148 WORDING_REALLOCATED
149 };
150
151 /* Base class representing a deallocation function,
152 either a built-in one we know about, or one exposed via
153 __attribute__((malloc(DEALLOCATOR))). */
154
155 struct deallocator
156 {
157 hashval_t hash () const;
158 void dump_to_pp (pretty_printer *pp) const;
159 static int cmp (const deallocator *a, const deallocator *b);
160 static int cmp_ptr_ptr (const void *, const void *);
161
162 /* Name to use in diagnostics. */
163 const char *m_name;
164
165 /* Which wording to use in diagnostics. */
166 enum wording m_wording;
167
168 /* State for a value passed to one of the deallocators. */
169 state_machine::state_t m_freed;
170
171 protected:
172 deallocator (malloc_state_machine *sm,
173 const char *name,
174 enum wording wording);
175 };
176
177 /* Subclass representing a predefined deallocator.
178 e.g. "delete []", without needing a specific FUNCTION_DECL
179 ahead of time. */
180
181 struct standard_deallocator : public deallocator
182 {
183 standard_deallocator (malloc_state_machine *sm,
184 const char *name,
185 enum wording wording);
186 };
187
188 /* Subclass representing a user-defined deallocator
189 via __attribute__((malloc(DEALLOCATOR))) given
190 a specific FUNCTION_DECL. */
191
192 struct custom_deallocator : public deallocator
193 {
194 custom_deallocator (malloc_state_machine *sm,
195 tree deallocator_fndecl,
196 enum wording wording)
197 : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
198 wording)
199 {
200 }
201 };
202
203 /* Base class representing a set of possible deallocators.
204 Often this will be just a single deallocator, but some
205 allocators have multiple valid deallocators (e.g. the result of
206 "fopen" can be closed by either "fclose" or "freopen"). */
207
208 struct deallocator_set
209 {
210 deallocator_set (malloc_state_machine *sm,
211 enum wording wording);
212 virtual ~deallocator_set () {}
213
214 virtual bool contains_p (const deallocator *d) const = 0;
215 virtual const deallocator *maybe_get_single () const = 0;
216 virtual void dump_to_pp (pretty_printer *pp) const = 0;
217 void dump () const;
218
219 /* Which wording to use in diagnostics. */
220 enum wording m_wording;
221
222 /* Pointers to states.
223 These states are owned by the state_machine base class. */
224
225 /* State for an unchecked result from an allocator using this set. */
226 state_machine::state_t m_unchecked;
227
228 /* State for a known non-NULL result from such an allocator. */
229 state_machine::state_t m_nonnull;
230 };
231
232 /* Subclass of deallocator_set representing a set of deallocators
233 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
234
235 struct custom_deallocator_set : public deallocator_set
236 {
237 typedef const auto_vec <const deallocator *> *key_t;
238
239 custom_deallocator_set (malloc_state_machine *sm,
240 const auto_vec <const deallocator *> *vec,
241 //const char *name,
242 //const char *dealloc_funcname,
243 //unsigned arg_idx,
244 enum wording wording);
245
246 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
247 const deallocator *maybe_get_single () const FINAL OVERRIDE;
248 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
249
250 auto_vec <const deallocator *> m_deallocator_vec;
251 };
252
253 /* Subclass of deallocator_set representing a set of deallocators
254 with a single standard_deallocator, e.g. "delete []". */
255
256 struct standard_deallocator_set : public deallocator_set
257 {
258 standard_deallocator_set (malloc_state_machine *sm,
259 const char *name,
260 enum wording wording);
261
262 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
263 const deallocator *maybe_get_single () const FINAL OVERRIDE;
264 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
265
266 standard_deallocator m_deallocator;
267 };
268
269 /* Traits class for ensuring uniqueness of deallocator_sets within
270 malloc_state_machine. */
271
272 struct deallocator_set_map_traits
273 {
274 typedef custom_deallocator_set::key_t key_type;
275 typedef custom_deallocator_set *value_type;
276 typedef custom_deallocator_set *compare_type;
277
278 static inline hashval_t hash (const key_type &k)
279 {
280 gcc_assert (k != NULL);
281 gcc_assert (k != reinterpret_cast<key_type> (1));
282
283 hashval_t result = 0;
284 unsigned i;
285 const deallocator *d;
286 FOR_EACH_VEC_ELT (*k, i, d)
287 result ^= d->hash ();
288 return result;
289 }
290 static inline bool equal_keys (const key_type &k1, const key_type &k2)
291 {
292 if (k1->length () != k2->length ())
293 return false;
294
295 for (unsigned i = 0; i < k1->length (); i++)
296 if ((*k1)[i] != (*k2)[i])
297 return false;
298
299 return true;
300 }
301 template <typename T>
302 static inline void remove (T &)
303 {
304 /* empty; the nodes are handled elsewhere. */
305 }
306 template <typename T>
307 static inline void mark_deleted (T &entry)
308 {
309 entry.m_key = reinterpret_cast<key_type> (1);
310 }
311 template <typename T>
312 static inline void mark_empty (T &entry)
313 {
314 entry.m_key = NULL;
315 }
316 template <typename T>
317 static inline bool is_deleted (const T &entry)
318 {
319 return entry.m_key == reinterpret_cast<key_type> (1);
320 }
321 template <typename T>
322 static inline bool is_empty (const T &entry)
323 {
324 return entry.m_key == NULL;
325 }
326 static const bool empty_zero_p = false;
327 };
328
329 /* A state machine for detecting misuses of the malloc/free API.
330
331 See sm-malloc.dot for an overview (keep this in-sync with that file). */
332
333 class malloc_state_machine : public state_machine
334 {
335 public:
336 typedef allocation_state custom_data_t;
337
338 malloc_state_machine (logger *logger);
339 ~malloc_state_machine ();
340
341 state_t
342 add_state (const char *name, enum resource_state rs,
343 const deallocator_set *deallocators,
344 const deallocator *deallocator);
345
346 bool inherited_state_p () const FINAL OVERRIDE { return false; }
347
348 state_machine::state_t
349 get_default_state (const svalue *sval) const FINAL OVERRIDE
350 {
351 if (tree cst = sval->maybe_get_constant ())
352 {
353 if (zerop (cst))
354 return m_null;
355 }
356 if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
357 {
358 const region *reg = ptr->get_pointee ();
359 switch (reg->get_memory_space ())
360 {
361 default:
362 break;
363 case MEMSPACE_CODE:
364 case MEMSPACE_GLOBALS:
365 case MEMSPACE_STACK:
366 case MEMSPACE_READONLY_DATA:
367 return m_non_heap;
368 }
369 }
370 return m_start;
371 }
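
  /* For example (an illustrative sketch), given:

       int g;
       void test (void) { free (&g); }

     the value "&g" points into MEMSPACE_GLOBALS, so its default state is
     "non-heap", and the call to "free" is diagnosed via
     handle_free_of_non_heap.  */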
372
373 bool on_stmt (sm_context *sm_ctxt,
374 const supernode *node,
375 const gimple *stmt) const FINAL OVERRIDE;
376
377 void on_phi (sm_context *sm_ctxt,
378 const supernode *node,
379 const gphi *phi,
380 tree rhs) const FINAL OVERRIDE;
381
382 void on_condition (sm_context *sm_ctxt,
383 const supernode *node,
384 const gimple *stmt,
385 const svalue *lhs,
386 enum tree_code op,
387 const svalue *rhs) const FINAL OVERRIDE;
388
389 bool can_purge_p (state_t s) const FINAL OVERRIDE;
390 pending_diagnostic *on_leak (tree var) const FINAL OVERRIDE;
391
392 bool reset_when_passed_to_unknown_fn_p (state_t s,
393 bool is_mutable) const FINAL OVERRIDE;
394
395 static bool unaffected_by_call_p (tree fndecl);
396
397 void on_realloc_with_move (region_model *model,
398 sm_state_map *smap,
399 const svalue *old_ptr_sval,
400 const svalue *new_ptr_sval,
401 const extrinsic_state &ext_state) const;
402
403 standard_deallocator_set m_free;
404 standard_deallocator_set m_scalar_delete;
405 standard_deallocator_set m_vector_delete;
406
407 standard_deallocator m_realloc;
408
409 /* States that are independent of any particular allocator/deallocator API. */
410
411 /* State for a pointer that's known to be NULL. */
412 state_t m_null;
413
414 /* State for a pointer that's known to not be on the heap (e.g. to a local
415 or global). */
416 state_t m_non_heap; // TODO: or should this be a different state machine?
417 // or do we need child values etc?
418
419 /* Stop state, for pointers we don't want to track any more. */
420 state_t m_stop;
421
422 private:
423 const custom_deallocator_set *
424 get_or_create_custom_deallocator_set (tree allocator_fndecl);
425 custom_deallocator_set *
426 maybe_create_custom_deallocator_set (tree allocator_fndecl);
427 const deallocator *
428 get_or_create_deallocator (tree deallocator_fndecl);
429
430 void on_allocator_call (sm_context *sm_ctxt,
431 const gcall *call,
432 const deallocator_set *deallocators,
433 bool returns_nonnull = false) const;
434 void handle_free_of_non_heap (sm_context *sm_ctxt,
435 const supernode *node,
436 const gcall *call,
437 tree arg,
438 const deallocator *d) const;
439 void on_deallocator_call (sm_context *sm_ctxt,
440 const supernode *node,
441 const gcall *call,
442 const deallocator *d,
443 unsigned argno) const;
444 void on_realloc_call (sm_context *sm_ctxt,
445 const supernode *node,
446 const gcall *call) const;
447 void on_zero_assignment (sm_context *sm_ctxt,
448 const gimple *stmt,
449 tree lhs) const;
450
451 /* A map for consolidating deallocators so that they are
452 unique per deallocator FUNCTION_DECL. */
453 typedef hash_map<tree, deallocator *> deallocator_map_t;
454 deallocator_map_t m_deallocator_map;
455
456 /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *. */
457 typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
458 deallocator_set_cache_t m_custom_deallocator_set_cache;
459
460 /* A map for consolidating custom_deallocator_set instances. */
461 typedef hash_map<custom_deallocator_set::key_t,
462 custom_deallocator_set *,
463 deallocator_set_map_traits> custom_deallocator_set_map_t;
464 custom_deallocator_set_map_t m_custom_deallocator_set_map;
465
466 /* Record of dynamically-allocated objects, for cleanup. */
467 auto_vec <custom_deallocator_set *> m_dynamic_sets;
468 auto_vec <custom_deallocator *> m_dynamic_deallocators;
469 };
470
471 /* struct deallocator. */
472
473 deallocator::deallocator (malloc_state_machine *sm,
474 const char *name,
475 enum wording wording)
476 : m_name (name),
477 m_wording (wording),
478 m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
479 {
480 }
481
482 hashval_t
483 deallocator::hash () const
484 {
485 return (hashval_t)m_freed->get_id ();
486 }
487
488 void
489 deallocator::dump_to_pp (pretty_printer *pp) const
490 {
491 pp_printf (pp, "%qs", m_name);
492 }
493
494 int
495 deallocator::cmp (const deallocator *a, const deallocator *b)
496 {
497 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
498 }
499
500 int
501 deallocator::cmp_ptr_ptr (const void *a, const void *b)
502 {
503 return cmp (*(const deallocator * const *)a,
504 *(const deallocator * const *)b);
505 }
506
507
508 /* struct standard_deallocator : public deallocator. */
509
510 standard_deallocator::standard_deallocator (malloc_state_machine *sm,
511 const char *name,
512 enum wording wording)
513 : deallocator (sm, name, wording)
514 {
515 }
516
517 /* struct deallocator_set. */
518
519 deallocator_set::deallocator_set (malloc_state_machine *sm,
520 enum wording wording)
521 : m_wording (wording),
522 m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
523 m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
524 {
525 }
526
527 /* Dump a description of this deallocator_set to stderr. */
528
529 DEBUG_FUNCTION void
530 deallocator_set::dump () const
531 {
532 pretty_printer pp;
533 pp_show_color (&pp) = pp_show_color (global_dc->printer);
534 pp.buffer->stream = stderr;
535 dump_to_pp (&pp);
536 pp_newline (&pp);
537 pp_flush (&pp);
538 }
539
540 /* struct custom_deallocator_set : public deallocator_set. */
541
542 custom_deallocator_set::
543 custom_deallocator_set (malloc_state_machine *sm,
544 const auto_vec <const deallocator *> *vec,
545 enum wording wording)
546 : deallocator_set (sm, wording),
547 m_deallocator_vec (vec->length ())
548 {
549 unsigned i;
550 const deallocator *d;
551 FOR_EACH_VEC_ELT (*vec, i, d)
552 m_deallocator_vec.safe_push (d);
553 }
554
555 bool
556 custom_deallocator_set::contains_p (const deallocator *d) const
557 {
558 unsigned i;
559 const deallocator *cd;
560 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
561 if (cd == d)
562 return true;
563 return false;
564 }
565
566 const deallocator *
567 custom_deallocator_set::maybe_get_single () const
568 {
569 if (m_deallocator_vec.length () == 1)
570 return m_deallocator_vec[0];
571 return NULL;
572 }
573
574 void
575 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
576 {
577 pp_character (pp, '{');
578 unsigned i;
579 const deallocator *d;
580 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
581 {
582 if (i > 0)
583 pp_string (pp, ", ");
584 d->dump_to_pp (pp);
585 }
586 pp_character (pp, '}');
587 }
588
589 /* struct standard_deallocator_set : public deallocator_set. */
590
591 standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
592 const char *name,
593 enum wording wording)
594 : deallocator_set (sm, wording),
595 m_deallocator (sm, name, wording)
596 {
597 }
598
599 bool
600 standard_deallocator_set::contains_p (const deallocator *d) const
601 {
602 return d == &m_deallocator;
603 }
604
605 const deallocator *
606 standard_deallocator_set::maybe_get_single () const
607 {
608 return &m_deallocator;
609 }
610
611 void
612 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
613 {
614 pp_character (pp, '{');
615 pp_string (pp, m_deallocator.m_name);
616 pp_character (pp, '}');
617 }
618
619 /* Return STATE cast to the custom state subclass, or NULL for the start state.
620 Everything should be an allocation_state apart from the start state. */
621
622 static const allocation_state *
623 dyn_cast_allocation_state (state_machine::state_t state)
624 {
625 if (state->get_id () == 0)
626 return NULL;
627 return static_cast <const allocation_state *> (state);
628 }
629
630 /* Return STATE cast to the custom state subclass, for a state that is
631 already known to not be the start state. */
632
633 static const allocation_state *
634 as_a_allocation_state (state_machine::state_t state)
635 {
636 gcc_assert (state->get_id () != 0);
637 return static_cast <const allocation_state *> (state);
638 }
639
640 /* Get the resource_state for STATE. */
641
642 static enum resource_state
643 get_rs (state_machine::state_t state)
644 {
645 if (const allocation_state *astate = dyn_cast_allocation_state (state))
646 return astate->m_rs;
647 else
648 return RS_START;
649 }
650
651 /* Return true if STATE is the start state. */
652
653 static bool
654 start_p (state_machine::state_t state)
655 {
656 return get_rs (state) == RS_START;
657 }
658
659 /* Return true if STATE is an unchecked result from an allocator. */
660
661 static bool
662 unchecked_p (state_machine::state_t state)
663 {
664 return get_rs (state) == RS_UNCHECKED;
665 }
666
667 /* Return true if STATE is a non-null result from an allocator. */
668
669 static bool
670 nonnull_p (state_machine::state_t state)
671 {
672 return get_rs (state) == RS_NONNULL;
673 }
674
675 /* Return true if STATE is a value that has been passed to a deallocator. */
676
677 static bool
678 freed_p (state_machine::state_t state)
679 {
680 return get_rs (state) == RS_FREED;
681 }
682
683 /* Class for diagnostics relating to malloc_state_machine. */
684
685 class malloc_diagnostic : public pending_diagnostic
686 {
687 public:
688 malloc_diagnostic (const malloc_state_machine &sm, tree arg)
689 : m_sm (sm), m_arg (arg)
690 {}
691
692 bool subclass_equal_p (const pending_diagnostic &base_other) const OVERRIDE
693 {
694 return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
695 }
696
697 label_text describe_state_change (const evdesc::state_change &change)
698 OVERRIDE
699 {
700 if (change.m_old_state == m_sm.get_start_state ()
701 && unchecked_p (change.m_new_state))
702 // TODO: verify that it's the allocation stmt, not a copy
703 return label_text::borrow ("allocated here");
704 if (unchecked_p (change.m_old_state)
705 && nonnull_p (change.m_new_state))
706 {
707 if (change.m_expr)
708 return change.formatted_print ("assuming %qE is non-NULL",
709 change.m_expr);
710 else
711 return change.formatted_print ("assuming %qs is non-NULL",
712 "<unknown>");
713 }
714 if (change.m_new_state == m_sm.m_null)
715 {
716 if (unchecked_p (change.m_old_state))
717 {
718 if (change.m_expr)
719 return change.formatted_print ("assuming %qE is NULL",
720 change.m_expr);
721 else
722 return change.formatted_print ("assuming %qs is NULL",
723 "<unknown>");
724 }
725 else
726 {
727 if (change.m_expr)
728 return change.formatted_print ("%qE is NULL",
729 change.m_expr);
730 else
731 return change.formatted_print ("%qs is NULL",
732 "<unknown>");
733 }
734 }
735
736 return label_text ();
737 }
738
739 protected:
740 const malloc_state_machine &m_sm;
741 tree m_arg;
742 };
743
744 /* Concrete subclass for reporting mismatching allocator/deallocator
745 diagnostics. */
746
747 class mismatching_deallocation : public malloc_diagnostic
748 {
749 public:
750 mismatching_deallocation (const malloc_state_machine &sm, tree arg,
751 const deallocator_set *expected_deallocators,
752 const deallocator *actual_dealloc)
753 : malloc_diagnostic (sm, arg),
754 m_expected_deallocators (expected_deallocators),
755 m_actual_dealloc (actual_dealloc)
756 {}
757
758 const char *get_kind () const FINAL OVERRIDE
759 {
760 return "mismatching_deallocation";
761 }
762
763 bool emit (rich_location *rich_loc) FINAL OVERRIDE
764 {
765 auto_diagnostic_group d;
766 diagnostic_metadata m;
767 m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines. */
768 if (const deallocator *expected_dealloc
769 = m_expected_deallocators->maybe_get_single ())
770 return warning_meta (rich_loc, m, OPT_Wanalyzer_mismatching_deallocation,
771 "%qE should have been deallocated with %qs"
772 " but was deallocated with %qs",
773 m_arg, expected_dealloc->m_name,
774 m_actual_dealloc->m_name);
775 else
776 return warning_meta (rich_loc, m, OPT_Wanalyzer_mismatching_deallocation,
777 "%qs called on %qE returned from a mismatched"
778 " allocation function",
779 m_actual_dealloc->m_name, m_arg);
780 }
781
782 label_text describe_state_change (const evdesc::state_change &change)
783 FINAL OVERRIDE
784 {
785 if (unchecked_p (change.m_new_state))
786 {
787 m_alloc_event = change.m_event_id;
788 if (const deallocator *expected_dealloc
789 = m_expected_deallocators->maybe_get_single ())
790 return change.formatted_print ("allocated here"
791 " (expects deallocation with %qs)",
792 expected_dealloc->m_name);
793 else
794 return change.formatted_print ("allocated here");
795 }
796 return malloc_diagnostic::describe_state_change (change);
797 }
798
799 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
800 {
801 if (m_alloc_event.known_p ())
802 {
803 if (const deallocator *expected_dealloc
804 = m_expected_deallocators->maybe_get_single ())
805 return ev.formatted_print
806 ("deallocated with %qs here;"
807 " allocation at %@ expects deallocation with %qs",
808 m_actual_dealloc->m_name, &m_alloc_event,
809 expected_dealloc->m_name);
810 else
811 return ev.formatted_print
812 ("deallocated with %qs here;"
813 " allocated at %@",
814 m_actual_dealloc->m_name, &m_alloc_event);
815 }
816 return ev.formatted_print ("deallocated with %qs here",
817 m_actual_dealloc->m_name);
818 }
819
820 private:
821 diagnostic_event_id_t m_alloc_event;
822 const deallocator_set *m_expected_deallocators;
823 const deallocator *m_actual_dealloc;
824 };
825
826 /* Concrete subclass for reporting double-free diagnostics. */
827
828 class double_free : public malloc_diagnostic
829 {
830 public:
831 double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
832 : malloc_diagnostic (sm, arg), m_funcname (funcname)
833 {}
834
835 const char *get_kind () const FINAL OVERRIDE { return "double_free"; }
836
837 bool emit (rich_location *rich_loc) FINAL OVERRIDE
838 {
839 auto_diagnostic_group d;
840 diagnostic_metadata m;
841 m.add_cwe (415); /* CWE-415: Double Free. */
842 return warning_meta (rich_loc, m, OPT_Wanalyzer_double_free,
843 "double-%qs of %qE", m_funcname, m_arg);
844 }
845
846 label_text describe_state_change (const evdesc::state_change &change)
847 FINAL OVERRIDE
848 {
849 if (freed_p (change.m_new_state))
850 {
851 m_first_free_event = change.m_event_id;
852 return change.formatted_print ("first %qs here", m_funcname);
853 }
854 return malloc_diagnostic::describe_state_change (change);
855 }
856
857 label_text describe_call_with_state (const evdesc::call_with_state &info)
858 FINAL OVERRIDE
859 {
860 if (freed_p (info.m_state))
861 return info.formatted_print
862 ("passing freed pointer %qE in call to %qE from %qE",
863 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
864 return label_text ();
865 }
866
867 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
868 {
869 if (m_first_free_event.known_p ())
870 return ev.formatted_print ("second %qs here; first %qs was at %@",
871 m_funcname, m_funcname,
872 &m_first_free_event);
873 return ev.formatted_print ("second %qs here", m_funcname);
874 }
875
876 private:
877 diagnostic_event_id_t m_first_free_event;
878 const char *m_funcname;
879 };
880
881 /* Abstract subclass for describing possible bad uses of NULL.
882 Responsible for describing the call that could return NULL. */
883
884 class possible_null : public malloc_diagnostic
885 {
886 public:
887 possible_null (const malloc_state_machine &sm, tree arg)
888 : malloc_diagnostic (sm, arg)
889 {}
890
891 label_text describe_state_change (const evdesc::state_change &change)
892 FINAL OVERRIDE
893 {
894 if (change.m_old_state == m_sm.get_start_state ()
895 && unchecked_p (change.m_new_state))
896 {
897 m_origin_of_unchecked_event = change.m_event_id;
898 return label_text::borrow ("this call could return NULL");
899 }
900 return malloc_diagnostic::describe_state_change (change);
901 }
902
903 label_text describe_return_of_state (const evdesc::return_of_state &info)
904 FINAL OVERRIDE
905 {
906 if (unchecked_p (info.m_state))
907 return info.formatted_print ("possible return of NULL to %qE from %qE",
908 info.m_caller_fndecl, info.m_callee_fndecl);
909 return label_text ();
910 }
911
912 protected:
913 diagnostic_event_id_t m_origin_of_unchecked_event;
914 };
915
916 /* Concrete subclass for describing dereference of a possible NULL
917 value. */
918
919 class possible_null_deref : public possible_null
920 {
921 public:
922 possible_null_deref (const malloc_state_machine &sm, tree arg)
923 : possible_null (sm, arg)
924 {}
925
926 const char *get_kind () const FINAL OVERRIDE { return "possible_null_deref"; }
927
928 bool emit (rich_location *rich_loc) FINAL OVERRIDE
929 {
930 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
931 diagnostic_metadata m;
932 m.add_cwe (690);
933 return warning_meta (rich_loc, m,
934 OPT_Wanalyzer_possible_null_dereference,
935 "dereference of possibly-NULL %qE", m_arg);
936 }
937
938 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
939 {
940 if (m_origin_of_unchecked_event.known_p ())
941 return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
942 ev.m_expr,
943 &m_origin_of_unchecked_event);
944 else
945 return ev.formatted_print ("%qE could be NULL", ev.m_expr);
946 }
947
948 };
949
950 /* Return true if FNDECL is a C++ method. */
951
952 static bool
953 method_p (tree fndecl)
954 {
955 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
956 }
957
958 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
959 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
960 as called from cp_printer). */
961
962 static label_text
963 describe_argument_index (tree fndecl, int arg_idx)
964 {
965 if (method_p (fndecl))
966 if (arg_idx == 0)
967 return label_text::borrow ("'this'");
968 pretty_printer pp;
969 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
970 return label_text::take (xstrdup (pp_formatted_text (&pp)));
971 }
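
/* For example, for a C++ method "void S::set (void *ptr)", argument index
   0 is described as "'this'" and index 1 as "1", whereas for a non-member
   function, argument index 0 is described as "1".  */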
972
973 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
974 Issue a note informing that the pertinent argument must be non-NULL. */
975
976 static void
977 inform_nonnull_attribute (tree fndecl, int arg_idx)
978 {
979 label_text arg_desc = describe_argument_index (fndecl, arg_idx);
980 inform (DECL_SOURCE_LOCATION (fndecl),
981 "argument %s of %qD must be non-null",
982 arg_desc.m_buffer, fndecl);
983 arg_desc.maybe_free ();
984 /* Ideally we would use the location of the parm and underline the
985 attribute also - but we don't have the location_t values at this point
986 in the middle-end.
987 For reference, the C and C++ FEs have get_fndecl_argument_location. */
988 }
989
990 /* Concrete subclass for describing passing a possibly-NULL value to a
991 function marked with __attribute__((nonnull)). */
992
993 class possible_null_arg : public possible_null
994 {
995 public:
996 possible_null_arg (const malloc_state_machine &sm, tree arg,
997 tree fndecl, int arg_idx)
998 : possible_null (sm, arg),
999 m_fndecl (fndecl), m_arg_idx (arg_idx)
1000 {}
1001
1002 const char *get_kind () const FINAL OVERRIDE { return "possible_null_arg"; }
1003
1004 bool subclass_equal_p (const pending_diagnostic &base_other) const
1005 {
1006 const possible_null_arg &sub_other
1007 = (const possible_null_arg &)base_other;
1008 return (same_tree_p (m_arg, sub_other.m_arg)
1009 && m_fndecl == sub_other.m_fndecl
1010 && m_arg_idx == sub_other.m_arg_idx);
1011 }
1012
1013
1014 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1015 {
1016 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1017 auto_diagnostic_group d;
1018 diagnostic_metadata m;
1019 m.add_cwe (690);
1020 bool warned
1021 = warning_meta (rich_loc, m, OPT_Wanalyzer_possible_null_argument,
1022 "use of possibly-NULL %qE where non-null expected",
1023 m_arg);
1024 if (warned)
1025 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1026 return warned;
1027 }
1028
1029 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1030 {
1031 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1032 label_text result;
1033 if (m_origin_of_unchecked_event.known_p ())
1034 result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
1035 " where non-null expected",
1036 arg_desc.m_buffer, ev.m_expr,
1037 &m_origin_of_unchecked_event);
1038 else
1039 result = ev.formatted_print ("argument %s (%qE) could be NULL"
1040 " where non-null expected",
1041 arg_desc.m_buffer, ev.m_expr);
1042 arg_desc.maybe_free ();
1043 return result;
1044 }
1045
1046 private:
1047 tree m_fndecl;
1048 int m_arg_idx;
1049 };
1050
1051 /* Concrete subclass for describing a dereference of a NULL value. */
1052
1053 class null_deref : public malloc_diagnostic
1054 {
1055 public:
1056 null_deref (const malloc_state_machine &sm, tree arg)
1057 : malloc_diagnostic (sm, arg) {}
1058
1059 const char *get_kind () const FINAL OVERRIDE { return "null_deref"; }
1060
1061 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1062 {
1063 /* CWE-476: NULL Pointer Dereference. */
1064 diagnostic_metadata m;
1065 m.add_cwe (476);
1066 return warning_meta (rich_loc, m,
1067 OPT_Wanalyzer_null_dereference,
1068 "dereference of NULL %qE", m_arg);
1069 }
1070
1071 label_text describe_return_of_state (const evdesc::return_of_state &info)
1072 FINAL OVERRIDE
1073 {
1074 if (info.m_state == m_sm.m_null)
1075 return info.formatted_print ("return of NULL to %qE from %qE",
1076 info.m_caller_fndecl, info.m_callee_fndecl);
1077 return label_text ();
1078 }
1079
1080 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1081 {
1082 return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
1083 }
1084 };
1085
1086 /* Concrete subclass for describing passing a NULL value to a
1087 function marked with __attribute__((nonnull)). */
1088
1089 class null_arg : public malloc_diagnostic
1090 {
1091 public:
1092 null_arg (const malloc_state_machine &sm, tree arg,
1093 tree fndecl, int arg_idx)
1094 : malloc_diagnostic (sm, arg),
1095 m_fndecl (fndecl), m_arg_idx (arg_idx)
1096 {}
1097
1098 const char *get_kind () const FINAL OVERRIDE { return "null_arg"; }
1099
1100 bool subclass_equal_p (const pending_diagnostic &base_other) const
1101 {
1102 const null_arg &sub_other
1103 = (const null_arg &)base_other;
1104 return (same_tree_p (m_arg, sub_other.m_arg)
1105 && m_fndecl == sub_other.m_fndecl
1106 && m_arg_idx == sub_other.m_arg_idx);
1107 }
1108
1109 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1110 {
1111 /* CWE-476: NULL Pointer Dereference. */
1112 auto_diagnostic_group d;
1113 diagnostic_metadata m;
1114 m.add_cwe (476);
1115
1116 bool warned;
1117 if (zerop (m_arg))
1118 warned = warning_meta (rich_loc, m, OPT_Wanalyzer_null_argument,
1119 "use of NULL where non-null expected");
1120 else
1121 warned = warning_meta (rich_loc, m, OPT_Wanalyzer_null_argument,
1122 "use of NULL %qE where non-null expected",
1123 m_arg);
1124 if (warned)
1125 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1126 return warned;
1127 }
1128
1129 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1130 {
1131 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1132 label_text result;
1133 if (zerop (ev.m_expr))
1134 result = ev.formatted_print ("argument %s NULL where non-null expected",
1135 arg_desc.m_buffer);
1136 else
1137 result = ev.formatted_print ("argument %s (%qE) NULL"
1138 " where non-null expected",
1139 arg_desc.m_buffer, ev.m_expr);
1140 arg_desc.maybe_free ();
1141 return result;
1142 }
1143
1144 private:
1145 tree m_fndecl;
1146 int m_arg_idx;
1147 };
1148
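/* Concrete subclass for reporting use-after-free diagnostics.  */
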
1149 class use_after_free : public malloc_diagnostic
1150 {
1151 public:
1152 use_after_free (const malloc_state_machine &sm, tree arg,
1153 const deallocator *deallocator)
1154 : malloc_diagnostic (sm, arg),
1155 m_deallocator (deallocator)
1156 {
1157 gcc_assert (deallocator);
1158 }
1159
1160 const char *get_kind () const FINAL OVERRIDE { return "use_after_free"; }
1161
1162 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1163 {
1164 /* CWE-416: Use After Free. */
1165 diagnostic_metadata m;
1166 m.add_cwe (416);
1167 return warning_meta (rich_loc, m, OPT_Wanalyzer_use_after_free,
1168 "use after %<%s%> of %qE",
1169 m_deallocator->m_name, m_arg);
1170 }
1171
1172 label_text describe_state_change (const evdesc::state_change &change)
1173 FINAL OVERRIDE
1174 {
1175 if (freed_p (change.m_new_state))
1176 {
1177 m_free_event = change.m_event_id;
1178 switch (m_deallocator->m_wording)
1179 {
1180 default:
1181 case WORDING_REALLOCATED:
1182 gcc_unreachable ();
1183 case WORDING_FREED:
1184 return label_text::borrow ("freed here");
1185 case WORDING_DELETED:
1186 return label_text::borrow ("deleted here");
1187 case WORDING_DEALLOCATED:
1188 return label_text::borrow ("deallocated here");
1189 }
1190 }
1191 return malloc_diagnostic::describe_state_change (change);
1192 }
1193
1194 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1195 {
1196 const char *funcname = m_deallocator->m_name;
1197 if (m_free_event.known_p ())
1198 switch (m_deallocator->m_wording)
1199 {
1200 default:
1201 case WORDING_REALLOCATED:
1202 gcc_unreachable ();
1203 case WORDING_FREED:
1204 return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
1205 funcname, ev.m_expr, &m_free_event);
1206 case WORDING_DELETED:
1207 return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
1208 funcname, ev.m_expr, &m_free_event);
1209 case WORDING_DEALLOCATED:
1210 return ev.formatted_print ("use after %<%s%> of %qE;"
1211 " deallocated at %@",
1212 funcname, ev.m_expr, &m_free_event);
1213 }
1214 else
1215 return ev.formatted_print ("use after %<%s%> of %qE",
1216 funcname, ev.m_expr);
1217 }
1218
1219 /* Implementation of pending_diagnostic::supercedes_p for
1220 use_after_free.
1221
1222 We want use-after-free to supercede use-of-uninitialized-value,
1223 so that if we have these at the same stmt, we don't emit
1224 a use-of-uninitialized, just the use-after-free.
1225 (this is because we fully purge information about freed
1226 buffers when we free them to avoid state explosions, so
1227 that if they are accessed after the free, it looks like
1228 they are uninitialized). */
1229
1230 bool supercedes_p (const pending_diagnostic &other) const FINAL OVERRIDE
1231 {
1232 if (other.use_of_uninit_p ())
1233 return true;
1234
1235 return false;
1236 }
1237
1238 private:
1239 diagnostic_event_id_t m_free_event;
1240 const deallocator *m_deallocator;
1241 };
1242
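/* Concrete subclass for reporting diagnostics about leaked allocations.  */
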
1243 class malloc_leak : public malloc_diagnostic
1244 {
1245 public:
1246 malloc_leak (const malloc_state_machine &sm, tree arg)
1247 : malloc_diagnostic (sm, arg) {}
1248
1249 const char *get_kind () const FINAL OVERRIDE { return "malloc_leak"; }
1250
1251 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1252 {
1253 diagnostic_metadata m;
1254 m.add_cwe (401);
1255 if (m_arg)
1256 return warning_meta (rich_loc, m, OPT_Wanalyzer_malloc_leak,
1257 "leak of %qE", m_arg);
1258 else
1259 return warning_meta (rich_loc, m, OPT_Wanalyzer_malloc_leak,
1260 "leak of %qs", "<unknown>");
1261 }
1262
1263 label_text describe_state_change (const evdesc::state_change &change)
1264 FINAL OVERRIDE
1265 {
1266 if (unchecked_p (change.m_new_state)
1267 || (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
1268 {
1269 m_alloc_event = change.m_event_id;
1270 return label_text::borrow ("allocated here");
1271 }
1272 return malloc_diagnostic::describe_state_change (change);
1273 }
1274
1275 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1276 {
1277 if (ev.m_expr)
1278 {
1279 if (m_alloc_event.known_p ())
1280 return ev.formatted_print ("%qE leaks here; was allocated at %@",
1281 ev.m_expr, &m_alloc_event);
1282 else
1283 return ev.formatted_print ("%qE leaks here", ev.m_expr);
1284 }
1285 else
1286 {
1287 if (m_alloc_event.known_p ())
1288 return ev.formatted_print ("%qs leaks here; was allocated at %@",
1289 "<unknown>", &m_alloc_event);
1290 else
1291 return ev.formatted_print ("%qs leaks here", "<unknown>");
1292 }
1293 }
1294
1295 private:
1296 diagnostic_event_id_t m_alloc_event;
1297 };
1298
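/* Concrete subclass for reporting deallocation of a region that is not
   on the heap (e.g. on the stack, or in a global).  */
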
1299 class free_of_non_heap : public malloc_diagnostic
1300 {
1301 public:
1302 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1303 const region *freed_reg,
1304 const char *funcname)
1305 : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
1306 {
1307 }
1308
1309 const char *get_kind () const FINAL OVERRIDE { return "free_of_non_heap"; }
1310
1311 bool subclass_equal_p (const pending_diagnostic &base_other) const
1312 FINAL OVERRIDE
1313 {
1314 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1315 return (same_tree_p (m_arg, other.m_arg)
1316 && m_freed_reg == other.m_freed_reg);
1317 }
1318
1319 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1320 {
1321 auto_diagnostic_group d;
1322 diagnostic_metadata m;
1323 m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1324 switch (get_memory_space ())
1325 {
1326 default:
1327 case MEMSPACE_HEAP:
1328 gcc_unreachable ();
1329 case MEMSPACE_UNKNOWN:
1330 case MEMSPACE_CODE:
1331 case MEMSPACE_GLOBALS:
1332 case MEMSPACE_READONLY_DATA:
1333 return warning_meta (rich_loc, m, OPT_Wanalyzer_free_of_non_heap,
1334 "%<%s%> of %qE which points to memory"
1335 " not on the heap",
1336 m_funcname, m_arg);
1337 break;
1338 case MEMSPACE_STACK:
1339 return warning_meta (rich_loc, m, OPT_Wanalyzer_free_of_non_heap,
1340 "%<%s%> of %qE which points to memory"
1341 " on the stack",
1342 m_funcname, m_arg);
1343 break;
1344 }
1345 }
1346
1347 label_text describe_state_change (const evdesc::state_change &)
1348 FINAL OVERRIDE
1349 {
1350 return label_text::borrow ("pointer is from here");
1351 }
1352
1353 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1354 {
1355 return ev.formatted_print ("call to %qs here", m_funcname);
1356 }
1357
1358 void mark_interesting_stuff (interesting_t *interest) FINAL OVERRIDE
1359 {
1360 if (m_freed_reg)
1361 interest->add_region_creation (m_freed_reg);
1362 }
1363
1364 private:
1365 enum memory_space get_memory_space () const
1366 {
1367 if (m_freed_reg)
1368 return m_freed_reg->get_memory_space ();
1369 else
1370 return MEMSPACE_UNKNOWN;
1371 }
1372
1373 const region *m_freed_reg;
1374 const char *m_funcname;
1375 };
1376
1377 /* struct allocation_state : public state_machine::state. */
1378
1379 /* Implementation of state_machine::state::dump_to_pp vfunc
1380 for allocation_state: append the API that this allocation is
1381 associated with. */
1382
1383 void
1384 allocation_state::dump_to_pp (pretty_printer *pp) const
1385 {
1386 state_machine::state::dump_to_pp (pp);
1387 if (m_deallocators)
1388 {
1389 pp_string (pp, " (");
1390 m_deallocators->dump_to_pp (pp);
1391 pp_character (pp, ')');
1392 }
1393 }
1394
1395 /* Given an allocation_state for a deallocator_set, get the "nonnull" state
1396 for the corresponding allocator(s). */
1397
1398 const allocation_state *
1399 allocation_state::get_nonnull () const
1400 {
1401 gcc_assert (m_deallocators);
1402 return as_a_allocation_state (m_deallocators->m_nonnull);
1403 }
1404
1405 /* malloc_state_machine's ctor. */
1406
1407 malloc_state_machine::malloc_state_machine (logger *logger)
1408 : state_machine ("malloc", logger),
1409 m_free (this, "free", WORDING_FREED),
1410 m_scalar_delete (this, "delete", WORDING_DELETED),
1411 m_vector_delete (this, "delete[]", WORDING_DELETED),
1412 m_realloc (this, "realloc", WORDING_REALLOCATED)
1413 {
1414 gcc_assert (m_start->get_id () == 0);
1415 m_null = add_state ("null", RS_FREED, NULL, NULL);
1416 m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
1417 m_stop = add_state ("stop", RS_STOP, NULL, NULL);
1418 }
1419
1420 malloc_state_machine::~malloc_state_machine ()
1421 {
1422 unsigned i;
1423 custom_deallocator_set *set;
1424 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1425 delete set;
1426 custom_deallocator *d;
1427 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1428 delete d;
1429 }
1430
1431 state_machine::state_t
1432 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1433 const deallocator_set *deallocators,
1434 const deallocator *deallocator)
1435 {
1436 return add_custom_state (new allocation_state (name, alloc_state_id (),
1437 rs, deallocators,
1438 deallocator));
1439 }
1440
1441 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1442 return a custom_deallocator_set for them, consolidating them
1443 to ensure uniqueness of the sets.
1444
1445 Return NULL if it has no such attributes. */
1446
1447 const custom_deallocator_set *
1448 malloc_state_machine::
1449 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1450 {
1451 /* Early rejection of decls without attributes. */
1452 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1453 if (!attrs)
1454 return NULL;
1455
1456 /* Otherwise, call maybe_create_custom_deallocator_set,
1457 memoizing the result. */
1458 if (custom_deallocator_set **slot
1459 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1460 return *slot;
1461 custom_deallocator_set *set
1462 = maybe_create_custom_deallocator_set (allocator_fndecl);
1463 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1464 return set;
1465 }
1466
1467 /* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1468 look for any "__attribute__((malloc(FOO)))" and return a
1469 custom_deallocator_set for them, consolidating them
1470 to ensure uniqueness of the sets.
1471
1472 Return NULL if it has no such attributes.
1473
1474 Subroutine of get_or_create_custom_deallocator_set which
1475 memoizes the result. */
1476
1477 custom_deallocator_set *
1478 malloc_state_machine::
1479 maybe_create_custom_deallocator_set (tree allocator_fndecl)
1480 {
1481 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1482 gcc_assert (attrs);
1483
1484 /* Look for instances of __attribute__((malloc(FOO))). */
1485 auto_vec<const deallocator *> deallocator_vec;
1486 for (tree allocs = attrs;
1487 (allocs = lookup_attribute ("malloc", allocs));
1488 allocs = TREE_CHAIN (allocs))
1489 {
1490 tree args = TREE_VALUE (allocs);
1491 if (!args)
1492 continue;
1493 if (TREE_VALUE (args))
1494 {
1495 const deallocator *d
1496 = get_or_create_deallocator (TREE_VALUE (args));
1497 deallocator_vec.safe_push (d);
1498 }
1499 }
1500
1501 /* If there weren't any deallocators, bail. */
1502 if (deallocator_vec.length () == 0)
1503 return NULL;
1504
1505 /* Consolidate, so that we reuse existing deallocator_set
1506 instances. */
1507 deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
1508 custom_deallocator_set **slot
1509 = m_custom_deallocator_set_map.get (&deallocator_vec);
1510 if (slot)
1511 return *slot;
1512 custom_deallocator_set *set
1513 = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
1514 m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
1515 m_dynamic_sets.safe_push (set);
1516 return set;
1517 }
1518
1519 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1520
1521 const deallocator *
1522 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1523 {
1524 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1525 if (slot)
1526 return *slot;
1527
1528 /* Reuse "free". */
1529 deallocator *d;
1530 if (is_named_call_p (deallocator_fndecl, "free")
1531 || is_std_named_call_p (deallocator_fndecl, "free")
1532 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1533 d = &m_free.m_deallocator;
1534 else
1535 {
1536 custom_deallocator *cd
1537 = new custom_deallocator (this, deallocator_fndecl,
1538 WORDING_DEALLOCATED);
1539 m_dynamic_deallocators.safe_push (cd);
1540 d = cd;
1541 }
1542 m_deallocator_map.put (deallocator_fndecl, d);
1543 return d;
1544 }
1545
1546 /* Return true if FNDECL is a known allocator, identified either by name
1547 or as a known built-in allocator. */
1548
1549 static bool
1550 known_allocator_p (const_tree fndecl, const gcall *call)
1551 {
1552 /* Either it is a function we know by name and number of arguments... */
1553 if (is_named_call_p (fndecl, "malloc", call, 1)
1554 || is_named_call_p (fndecl, "calloc", call, 2)
1555 || is_std_named_call_p (fndecl, "malloc", call, 1)
1556 || is_std_named_call_p (fndecl, "calloc", call, 2)
1557 || is_named_call_p (fndecl, "strdup", call, 1)
1558 || is_named_call_p (fndecl, "strndup", call, 2))
1559 return true;
1560
1561 /* ... or it is a builtin allocator that allocates objects freed with
1562 __builtin_free. */
1563 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1564 switch (DECL_FUNCTION_CODE (fndecl))
1565 {
1566 case BUILT_IN_MALLOC:
1567 case BUILT_IN_CALLOC:
1568 case BUILT_IN_STRDUP:
1569 case BUILT_IN_STRNDUP:
1570 return true;
1571 default:
1572 break;
1573 }
1574
1575 return false;
1576 }
1577
1578 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1579
1580 bool
1581 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1582 const supernode *node,
1583 const gimple *stmt) const
1584 {
1585 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1586 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1587 {
1588 if (known_allocator_p (callee_fndecl, call))
1589 {
1590 on_allocator_call (sm_ctxt, call, &m_free);
1591 return true;
1592 }
1593
1594 if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1595 on_allocator_call (sm_ctxt, call, &m_scalar_delete);
1596 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1597 on_allocator_call (sm_ctxt, call, &m_vector_delete);
1598 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1599 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1600 {
1601 on_deallocator_call (sm_ctxt, node, call,
1602 &m_scalar_delete.m_deallocator, 0);
1603 return true;
1604 }
1605 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1606 {
1607 on_deallocator_call (sm_ctxt, node, call,
1608 &m_vector_delete.m_deallocator, 0);
1609 return true;
1610 }
1611
1612 if (is_named_call_p (callee_fndecl, "alloca", call, 1)
1613 || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
1614 {
1615 tree lhs = gimple_call_lhs (call);
1616 if (lhs)
1617 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1618 return true;
1619 }
1620
1621 if (is_named_call_p (callee_fndecl, "free", call, 1)
1622 || is_std_named_call_p (callee_fndecl, "free", call, 1)
1623 || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
1624 {
1625 on_deallocator_call (sm_ctxt, node, call,
1626 &m_free.m_deallocator, 0);
1627 return true;
1628 }
1629
1630 if (is_named_call_p (callee_fndecl, "realloc", call, 2)
1631 || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
1632 {
1633 on_realloc_call (sm_ctxt, node, call);
1634 return true;
1635 }
1636
1637 if (unaffected_by_call_p (callee_fndecl))
1638 return true;
1639
1640 /* Cast away const-ness for cache-like operations. */
1641 malloc_state_machine *mutable_this
1642 = const_cast <malloc_state_machine *> (this);
1643
1644 /* Handle "__attribute__((malloc(FOO)))". */
1645 if (const deallocator_set *deallocators
1646 = mutable_this->get_or_create_custom_deallocator_set
1647 (callee_fndecl))
1648 {
1649 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
1650 bool returns_nonnull
1651 = lookup_attribute ("returns_nonnull", attrs);
1652 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
1653 }
1654
1655 /* Handle "__attribute__((nonnull))". */
1656 {
1657 tree fntype = TREE_TYPE (callee_fndecl);
1658 bitmap nonnull_args = get_nonnull_args (fntype);
1659 if (nonnull_args)
1660 {
1661 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1662 {
1663 tree arg = gimple_call_arg (stmt, i);
1664 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
1665 continue;
1666 /* The attribute applies to all pointer arguments if it was given
1667 without arguments, otherwise only to the listed ones. */
1668 if (bitmap_empty_p (nonnull_args)
1669 || bitmap_bit_p (nonnull_args, i))
1670 {
1671 state_t state = sm_ctxt->get_state (stmt, arg);
1672 /* Can't use a switch as the states are non-const. */
1673 if (unchecked_p (state))
1674 {
1675 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1676 sm_ctxt->warn (node, stmt, arg,
1677 new possible_null_arg (*this, diag_arg,
1678 callee_fndecl,
1679 i));
1680 const allocation_state *astate
1681 = as_a_allocation_state (state);
1682 sm_ctxt->set_next_state (stmt, arg,
1683 astate->get_nonnull ());
1684 }
1685 else if (state == m_null)
1686 {
1687 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1688 sm_ctxt->warn (node, stmt, arg,
1689 new null_arg (*this, diag_arg,
1690 callee_fndecl, i));
1691 sm_ctxt->set_next_state (stmt, arg, m_stop);
1692 }
1693 }
1694 }
1695 BITMAP_FREE (nonnull_args);
1696 }
1697 }
1698
1699 /* Check for this after nonnull, so that if we have both
1700 then we transition to "freed", rather than to "nonnull". */
1701 unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
1702 if (dealloc_argno != UINT_MAX)
1703 {
1704 const deallocator *d
1705 = mutable_this->get_or_create_deallocator (callee_fndecl);
1706 on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
1707 }
1708 }
1709
1710 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
1711 if (any_pointer_p (lhs))
1712 on_zero_assignment (sm_ctxt, stmt, lhs);
1713
1714 /* Handle dereferences. */
1715 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
1716 {
1717 tree op = gimple_op (stmt, i);
1718 if (!op)
1719 continue;
1720 if (TREE_CODE (op) == COMPONENT_REF)
1721 op = TREE_OPERAND (op, 0);
1722
1723 if (TREE_CODE (op) == MEM_REF)
1724 {
1725 tree arg = TREE_OPERAND (op, 0);
1726
1727 state_t state = sm_ctxt->get_state (stmt, arg);
1728 if (unchecked_p (state))
1729 {
1730 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1731 sm_ctxt->warn (node, stmt, arg,
1732 new possible_null_deref (*this, diag_arg));
1733 const allocation_state *astate = as_a_allocation_state (state);
1734 sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
1735 }
1736 else if (state == m_null)
1737 {
1738 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1739 sm_ctxt->warn (node, stmt, arg,
1740 new null_deref (*this, diag_arg));
1741 sm_ctxt->set_next_state (stmt, arg, m_stop);
1742 }
1743 else if (freed_p (state))
1744 {
1745 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1746 const allocation_state *astate = as_a_allocation_state (state);
1747 sm_ctxt->warn (node, stmt, arg,
1748 new use_after_free (*this, diag_arg,
1749 astate->m_deallocator));
1750 sm_ctxt->set_next_state (stmt, arg, m_stop);
1751 }
1752 }
1753 }
1754 return false;
1755 }
1756
1757 /* Handle a call to an allocator.
1758 RETURNS_NONNULL is true if CALL is to a fndecl known to have
1759 __attribute__((returns_nonnull)). */
1760
1761 void
1762 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
1763 const gcall *call,
1764 const deallocator_set *deallocators,
1765 bool returns_nonnull) const
1766 {
1767 tree lhs = gimple_call_lhs (call);
1768 if (lhs)
1769 {
1770 if (sm_ctxt->get_state (call, lhs) == m_start)
1771 sm_ctxt->set_next_state (call, lhs,
1772 (returns_nonnull
1773 ? deallocators->m_nonnull
1774 : deallocators->m_unchecked));
1775 }
1776 else
1777 {
1778 /* TODO: report leak. */
1779 }
1780 }
1781
1782 /* Handle deallocations of non-heap pointers.
1783 non-heap -> stop, with warning. */
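
/* For example (an illustrative sketch), code such as:

     void test (void)
     {
       char buf[16];
       free (buf);
     }

   would be diagnosed here as a "free" of memory on the stack
   (CWE-590).  */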
1784
1785 void
1786 malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
1787 const supernode *node,
1788 const gcall *call,
1789 tree arg,
1790 const deallocator *d) const
1791 {
1792 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1793 const region *freed_reg = NULL;
1794 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
1795 {
1796 const region_model *old_model = old_state->m_region_model;
1797 const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
1798 freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
1799 }
1800 sm_ctxt->warn (node, call, arg,
1801 new free_of_non_heap (*this, diag_arg, freed_reg,
1802 d->m_name));
1803 sm_ctxt->set_next_state (call, arg, m_stop);
1804 }
1805
1806 void
1807 malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
1808 const supernode *node,
1809 const gcall *call,
1810 const deallocator *d,
1811 unsigned argno) const
1812 {
1813 if (argno >= gimple_call_num_args (call))
1814 return;
1815 tree arg = gimple_call_arg (call, argno);
1816
1817 state_t state = sm_ctxt->get_state (call, arg);
1818
1819 /* start/unchecked/nonnull -> freed. */
1820 if (state == m_start)
1821 sm_ctxt->set_next_state (call, arg, d->m_freed);
1822 else if (unchecked_p (state) || nonnull_p (state))
1823 {
1824 const allocation_state *astate = as_a_allocation_state (state);
1825 gcc_assert (astate->m_deallocators);
1826 if (!astate->m_deallocators->contains_p (d))
1827 {
1828 /* Wrong allocator. */
1829 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1830 pending_diagnostic *pd
1831 = new mismatching_deallocation (*this, diag_arg,
1832 astate->m_deallocators,
1833 d);
1834 sm_ctxt->warn (node, call, arg, pd);
1835 }
1836 sm_ctxt->set_next_state (call, arg, d->m_freed);
1837 }
1838
1839 /* Keep state "null" as-is, rather than transitioning to "freed";
1840 we don't want to complain about double-free of NULL. */
1841 else if (state == d->m_freed)
1842 {
1843 /* freed -> stop, with warning. */
1844 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1845 sm_ctxt->warn (node, call, arg,
1846 new double_free (*this, diag_arg, d->m_name));
1847 sm_ctxt->set_next_state (call, arg, m_stop);
1848 }
1849 else if (state == m_non_heap)
1850 {
1851 /* non-heap -> stop, with warning. */
1852 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
1853 }
1854 }
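
/* Illustration (not part of this file) of the transitions above:

     void *p = malloc (n);
     free (p);              P: -> 'freed'
     free (p);              <-- double_free; P then moves to 'stop'

   and, assuming fopen has been declared with
   __attribute__ ((malloc (fclose))):

     FILE *f = fopen (path, "r");
     free (f);              <-- mismatching_deallocation: fclose expected

   Pointers in the 'null' state are deliberately left alone, since
   free (NULL) is well-defined and should not be reported.  */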
1855
1856 /* Handle a call to "realloc".
1857 Check for free of non-heap or mismatching allocators,
1858 transitioning to the "stop" state for such cases.
1859
1860 Otherwise, region_model::impl_call_realloc will later
1861 get called (which will handle other sm-state transitions
1862 when the state is bifurcated). */
1863
1864 void
1865 malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
1866 const supernode *node,
1867 const gcall *call) const
1868 {
1869 const unsigned argno = 0;
1870 const deallocator *d = &m_realloc;
1871
1872 tree arg = gimple_call_arg (call, argno);
1873
1874 state_t state = sm_ctxt->get_state (call, arg);
1875
1876 if (unchecked_p (state) || nonnull_p (state))
1877 {
1878 const allocation_state *astate = as_a_allocation_state (state);
1879 gcc_assert (astate->m_deallocators);
1880 if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
1881 {
1882 /* Wrong allocator. */
1883 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1884 pending_diagnostic *pd
1885 = new mismatching_deallocation (*this, diag_arg,
1886 astate->m_deallocators,
1887 d);
1888 sm_ctxt->warn (node, call, arg, pd);
1889 sm_ctxt->set_next_state (call, arg, m_stop);
1890 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1891 path_ctxt->terminate_path ();
1892 }
1893 }
1894 else if (state == m_free.m_deallocator.m_freed)
1895 {
1896 /* freed -> stop, with warning. */
1897 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1898 sm_ctxt->warn (node, call, arg,
1899 new double_free (*this, diag_arg, "free"));
1900 sm_ctxt->set_next_state (call, arg, m_stop);
1901 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1902 path_ctxt->terminate_path ();
1903 }
1904 else if (state == m_non_heap)
1905 {
1906 /* non-heap -> stop, with warning. */
1907 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
1908 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1909 path_ctxt->terminate_path ();
1910 }
1911 }
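
/* Illustration (not part of this file): the early rejections above
   cover callers such as

     void *p = malloc (n);
     free (p);
     p = realloc (p, n * 2);    <-- double_free: "free" was already called

   and, for a pointer whose expected deallocators do not include "free"
   (e.g. the result of an allocator declared with
   __attribute__ ((malloc (fclose)))), a mismatching_deallocation report.
   In each case the warning is emitted, the pointer moves to 'stop', and
   the simulated path is terminated.  */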
1912
1913 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
1914
1915 void
1916 malloc_state_machine::on_phi (sm_context *sm_ctxt,
1917 const supernode *node ATTRIBUTE_UNUSED,
1918 const gphi *phi,
1919 tree rhs) const
1920 {
1921 if (zerop (rhs))
1922 {
1923 tree lhs = gimple_phi_result (phi);
1924 on_zero_assignment (sm_ctxt, phi, lhs);
1925 }
1926 }
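
/* Illustration (not part of this file): a PHI with a zero argument
   arises for code such as

     void *p = flag ? malloc (n) : NULL;

   On the incoming edge that supplies the zero operand, the PHI result
   is treated like an assignment of zero, so P is in the 'null' state
   along that path.  */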
1927
1928 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
1929 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
1930
1931 void
1932 malloc_state_machine::on_condition (sm_context *sm_ctxt,
1933 const supernode *node ATTRIBUTE_UNUSED,
1934 const gimple *stmt,
1935 const svalue *lhs,
1936 enum tree_code op,
1937 const svalue *rhs) const
1938 {
1939 if (!rhs->all_zeroes_p ())
1940 return;
1941
1942 if (!any_pointer_p (lhs))
1943 return;
1944 if (!any_pointer_p (rhs))
1945 return;
1946
1947 if (op == NE_EXPR)
1948 {
1949 log ("got 'ARG != 0' match");
1950 state_t s = sm_ctxt->get_state (stmt, lhs);
1951 if (unchecked_p (s))
1952 {
1953 const allocation_state *astate = as_a_allocation_state (s);
1954 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
1955 }
1956 }
1957 else if (op == EQ_EXPR)
1958 {
1959 log ("got 'ARG == 0' match");
1960 state_t s = sm_ctxt->get_state (stmt, lhs);
1961 if (unchecked_p (s))
1962 sm_ctxt->set_next_state (stmt, lhs, m_null);
1963 }
1964 }
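
/* Illustration (not part of this file): the conditions handled above
   are the usual NULL checks, e.g.:

     void *p = malloc (n);      P: 'unchecked'
     if (p == NULL)             on the true edge:  P -> 'null'
       return;
     *(char *)p = 0;            on the false edge P is 'nonnull', so this
                                dereference is not flagged as possibly NULL

   Comparisons against anything other than an all-zero value, or between
   non-pointers, are ignored by the early returns above.  */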
1965
1966 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
1967 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
1968 (to avoid false leak reports). */
1969
1970 bool
1971 malloc_state_machine::can_purge_p (state_t s) const
1972 {
1973 enum resource_state rs = get_rs (s);
1974 return rs != RS_UNCHECKED && rs != RS_NONNULL;
1975 }
1976
1977 /* Implementation of state_machine::on_leak vfunc for malloc_state_machine
1978 (for complaining about leaks of pointers in state 'unchecked' and
1979 'nonnull'). */
1980
1981 pending_diagnostic *
1982 malloc_state_machine::on_leak (tree var) const
1983 {
1984 return new malloc_leak (*this, var);
1985 }
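
/* Illustration (not part of this file): refusing to purge 'unchecked'
   and 'nonnull' values keeps them tracked long enough for leak
   detection to work, e.g.:

     int test (size_t n)
     {
       void *p = malloc (n);
       if (!p)
         return -1;
       if (n > 100)
         return -2;         <-- malloc_leak: P is still 'nonnull' here
       free (p);
       return 0;
     }

   When the last reference to such a value is lost without a matching
   deallocation, on_leak supplies the malloc_leak diagnostic.  */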
1986
1987 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
1988 for malloc_state_machine. */
1989
1990 bool
1991 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
1992 bool is_mutable) const
1993 {
1994 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
1995 unknown fn. */
1996 if (s == m_non_heap)
1997 return false;
1998
1999 /* Otherwise, pointers passed as non-const can be freed. */
2000 return is_mutable;
2001 }
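
/* Illustration (not part of this file): passing a tracked pointer to a
   function the analyzer knows nothing about conservatively wipes its
   sm-state, the idea being that the callee might free it or stash it
   somewhere:

     void *p = malloc (n);
     opaque_fn (p);          P is reset, so no leak or double-free is
     return;                 reported for it afterwards

   (opaque_fn is just a placeholder name for some unmodelled external
   function.)  A pointer in the 'non-heap' state keeps that state, since
   being passed to an unknown callee cannot turn a stack or global
   address into a heap allocation.  */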
2002
2003 /* Return true if calls to FNDECL are known to not affect this sm-state. */
2004
2005 bool
2006 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2007 {
2008 /* A set of functions that are known to not affect allocation
2009 status, even if we haven't fully modelled the rest of their
2010 behavior yet. */
2011 static const char * const funcnames[] = {
2012 /* This array must be kept sorted. */
2013 "strsep",
2014 };
2015 const size_t count
2016 = sizeof (funcnames) / sizeof (funcnames[0]);
2017 function_set fs (funcnames, count);
2018
2019 if (fs.contains_decl_p (fndecl))
2020 return true;
2021
2022 return false;
2023 }
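
/* Illustration (not part of this file), roughly the intent of the
   allow-list above:

     char *buf = xstrdup (s);            BUF: 'unchecked'
     char *cursor = buf;
     char *tok = strsep (&cursor, ",");  does not wipe the allocation state
     return;                             <-- the missing free (buf) can still
                                             be reported as a leak

   Without the entry for strsep, the call would get the conservative
   "unknown function" treatment and the buffer's state could be reset,
   losing the later diagnostics.  */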
2024
2025 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2026 assign zero to LHS. */
2027
2028 void
2029 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2030 const gimple *stmt,
2031 tree lhs) const
2032 {
2033 state_t s = sm_ctxt->get_state (stmt, lhs);
2034 enum resource_state rs = get_rs (s);
2035 if (rs == RS_START
2036 || rs == RS_UNCHECKED
2037 || rs == RS_NONNULL
2038 || rs == RS_FREED)
2039 sm_ctxt->set_next_state (stmt, lhs, m_null);
2040 }
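
/* Illustration (not part of this file): assigning zero to a tracked
   pointer moves it to the 'null' state, e.g.:

     void *p = malloc (n);
     p = NULL;          P: -> 'null'; losing the only reference to the
                        allocation is roughly where a leak gets reported
     free (p);          not reported: freeing NULL is fine

   Values in other states (e.g. already 'null', 'non-heap' or 'stop')
   are left as-is by the check above.  */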
2041
2042 /* Special-case hook for handling realloc, for the "success with move to
2043 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2044 non-null.
2045
2046 This is similar to on_deallocator_call and on_allocator_call, but the
2047 checks happen earlier, in on_realloc_call, and the transitions happen here, once the state has been bifurcated.  */
2048
2049 void
2050 malloc_state_machine::
2051 on_realloc_with_move (region_model *model,
2052 sm_state_map *smap,
2053 const svalue *old_ptr_sval,
2054 const svalue *new_ptr_sval,
2055 const extrinsic_state &ext_state) const
2056 {
2057 smap->set_state (model, old_ptr_sval,
2058 m_free.m_deallocator.m_freed,
2059 NULL, ext_state);
2060
2061 smap->set_state (model, new_ptr_sval,
2062 m_free.m_nonnull,
2063 NULL, ext_state);
2064 }
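
/* Illustration (not part of this file): this hook handles the
   "success with move to a new buffer" outcome that
   region_model::impl_call_realloc models as its own path:

     void *q = realloc (p, bigger);
     if (q)
       {
         free (p);       <-- on the moved path P is 'freed', so this would
                             be diagnosed like a double-free
         free (q);       correct cleanup: Q is 'nonnull' on that path
       }

   The failure and "success in place" outcomes get their own bifurcated
   paths with their own sm-state transitions.  */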
2065
2066 } // anonymous namespace
2067
2068 /* Internal interface to this file. */
2069
2070 state_machine *
2071 make_malloc_state_machine (logger *logger)
2072 {
2073 return new malloc_state_machine (logger);
2074 }
2075
2076 /* Special-case hook for handling realloc, for use by
2077 region_model::impl_call_realloc::success_with_move::update_model. */
2078
2079 void
2080 region_model::on_realloc_with_move (const call_details &cd,
2081 const svalue *old_ptr_sval,
2082 const svalue *new_ptr_sval)
2083 {
2084 region_model_context *ctxt = cd.get_ctxt ();
2085 if (!ctxt)
2086 return;
2087 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2088 if (!ext_state)
2089 return;
2090
2091 sm_state_map *smap;
2092 const state_machine *sm;
2093 unsigned sm_idx;
2094 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2095 return;
2096
2097 gcc_assert (smap);
2098 gcc_assert (sm);
2099
2100 const malloc_state_machine &malloc_sm
2101 = (const malloc_state_machine &)*sm;
2102
2103 malloc_sm.on_realloc_with_move (this,
2104 smap,
2105 old_ptr_sval,
2106 new_ptr_sval,
2107 *ext_state);
2108 }
2109
2110 } // namespace ana
2111
2112 #endif /* #if ENABLE_ANALYZER */