1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "options.h"
31 #include "bitmap.h"
32 #include "diagnostic-path.h"
33 #include "diagnostic-metadata.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "analyzer/call-string.h"
40 #include "analyzer/program-point.h"
41 #include "analyzer/store.h"
42 #include "analyzer/region-model.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "analyzer/function-set.h"
46 #include "analyzer/program-state.h"
47
48 #if ENABLE_ANALYZER
49
50 namespace ana {
51
52 namespace {
53
54 /* This state machine and its various support classes track allocations
55 and deallocations.
56
57 It has a few standard allocation/deallocation pairs (e.g. new/delete),
58 and also supports user-defined ones via
59 __attribute__ ((malloc(DEALLOCATOR))).
60
61 There can be more than one valid deallocator for a given allocator,
62 for example:
63 __attribute__ ((malloc (fclose)))
64 __attribute__ ((malloc (freopen, 3)))
65 FILE* fopen (const char*, const char*);
66 A deallocator_set represents a particular set of valid deallocators.
67
68 We track the expected deallocator_set for a value, but not the allocation
69 function - there could be more than one allocator per deallocator_set.
70 For example, there could be dozens of allocators for "free" beyond just
71 malloc, e.g. calloc, xstrdup, etc. We don't want to explode the number
72 of states by tracking individual allocators in the exploded graph;
73 we merely want to track "this value expects to have 'free' called on it".
74 Perhaps we can reconstruct which allocator was used later, when emitting
75 the path, if it's necessary for precision of wording of diagnostics. */
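/* For illustration only (hypothetical declarations, not from any real
   header): a user-defined allocation API such as

     void my_close (struct handle *h);
     __attribute__ ((malloc (my_close)))
     struct handle *my_open (const char *name);

   gives values returned by my_open a deallocator_set containing just
   "my_close"; passing such a value to e.g. "free" instead is then
   reported as a mismatching deallocation.  */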
76
77 class deallocator;
78 class deallocator_set;
79 class malloc_state_machine;
80
81 /* An enum for discriminating between different kinds of allocation_state. */
82
83 enum resource_state
84 {
85 /* States that are independent of allocator/deallocator. */
86
87 /* The start state. */
88 RS_START,
89
90 /* State for a pointer that's known to be NULL. */
91 RS_NULL,
92
93 /* State for a pointer that's known to not be on the heap (e.g. to a local
94 or global). */
95 RS_NON_HEAP,
96
97 /* Stop state, for pointers we don't want to track any more. */
98 RS_STOP,
99
100 /* States that relate to a specific deallocator_set. */
101
102 /* State for a pointer returned from an allocator that hasn't
103 been checked for NULL.
104 It could be a pointer to heap-allocated memory, or could be NULL. */
105 RS_UNCHECKED,
106
107 /* State for a pointer returned from an allocator,
108 known to be non-NULL. */
109 RS_NONNULL,
110
111 /* State for a pointer passed to a deallocator. */
112 RS_FREED
113 };
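/* A sketch of the intended transitions for a typical heap pointer
   (illustrative; see sm-malloc.dot for the authoritative overview):

     p = malloc (n);       start     -> RS_UNCHECKED
     if (p != NULL)        unchecked -> RS_NONNULL on the true edge,
                           unchecked -> RS_NULL on the false edge
     free (p);             nonnull   -> RS_FREED

   RS_STOP is used once we no longer wish to track a pointer, e.g. after
   a diagnostic has been reported for it.  */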
114
115 /* Custom state subclass, which can optionally refer to a
116 deallocator_set. */
117
118 struct allocation_state : public state_machine::state
119 {
120 allocation_state (const char *name, unsigned id,
121 enum resource_state rs,
122 const deallocator_set *deallocators,
123 const deallocator *deallocator)
124 : state (name, id), m_rs (rs),
125 m_deallocators (deallocators),
126 m_deallocator (deallocator)
127 {}
128
129 void dump_to_pp (pretty_printer *pp) const final override;
130
131 const allocation_state *get_nonnull () const;
132
133 enum resource_state m_rs;
134 const deallocator_set *m_deallocators;
135 const deallocator *m_deallocator;
136 };
137
138 /* An enum for choosing which wording to use in various diagnostics
139 when describing deallocations. */
140
141 enum wording
142 {
143 WORDING_FREED,
144 WORDING_DELETED,
145 WORDING_DEALLOCATED,
146 WORDING_REALLOCATED
147 };
148
149 /* Base class representing a deallocation function,
150 either a built-in one we know about, or one exposed via
151 __attribute__((malloc(DEALLOCATOR))). */
152
153 struct deallocator
154 {
155 hashval_t hash () const;
156 void dump_to_pp (pretty_printer *pp) const;
157 static int cmp (const deallocator *a, const deallocator *b);
158 static int cmp_ptr_ptr (const void *, const void *);
159
160 /* Name to use in diagnostics. */
161 const char *m_name;
162
163 /* Which wording to use in diagnostics. */
164 enum wording m_wording;
165
166 /* State for a value passed to one of the deallocators. */
167 state_machine::state_t m_freed;
168
169 protected:
170 deallocator (malloc_state_machine *sm,
171 const char *name,
172 enum wording wording);
173 };
174
175 /* Subclass representing a predefined deallocator.
176 e.g. "delete []", without needing a specific FUNCTION_DECL
177 ahead of time. */
178
179 struct standard_deallocator : public deallocator
180 {
181 standard_deallocator (malloc_state_machine *sm,
182 const char *name,
183 enum wording wording);
184 };
185
186 /* Subclass representing a user-defined deallocator
187 via __attribute__((malloc(DEALLOCATOR))) given
188 a specific FUNCTION_DECL. */
189
190 struct custom_deallocator : public deallocator
191 {
192 custom_deallocator (malloc_state_machine *sm,
193 tree deallocator_fndecl,
194 enum wording wording)
195 : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
196 wording)
197 {
198 }
199 };
200
201 /* Base class representing a set of possible deallocators.
202 Often this will be just a single deallocator, but some
203 allocators have multiple valid deallocators (e.g. the result of
204 "fopen" can be closed by either "fclose" or "freopen"). */
205
206 struct deallocator_set
207 {
208 deallocator_set (malloc_state_machine *sm,
209 enum wording wording);
210 virtual ~deallocator_set () {}
211
212 virtual bool contains_p (const deallocator *d) const = 0;
213 virtual const deallocator *maybe_get_single () const = 0;
214 virtual void dump_to_pp (pretty_printer *pp) const = 0;
215 void dump () const;
216
217 /* Which wording to use in diagnostics. */
218 enum wording m_wording;
219
220 /* Pointers to states.
221 These states are owned by the state_machine base class. */
222
223 /* State for an unchecked result from an allocator using this set. */
224 state_machine::state_t m_unchecked;
225
226 /* State for a known non-NULL result from such an allocator. */
227 state_machine::state_t m_nonnull;
228 };
229
230 /* Subclass of deallocator_set representing a set of deallocators
231 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
232
233 struct custom_deallocator_set : public deallocator_set
234 {
235 typedef const auto_vec <const deallocator *> *key_t;
236
237 custom_deallocator_set (malloc_state_machine *sm,
238 const auto_vec <const deallocator *> *vec,
239 //const char *name,
240 //const char *dealloc_funcname,
241 //unsigned arg_idx,
242 enum wording wording);
243
244 bool contains_p (const deallocator *d) const final override;
245 const deallocator *maybe_get_single () const final override;
246 void dump_to_pp (pretty_printer *pp) const final override;
247
248 auto_vec <const deallocator *> m_deallocator_vec;
249 };
250
251 /* Subclass of deallocator_set representing a set of deallocators
252 with a single standard_deallocator, e.g. "delete []". */
253
254 struct standard_deallocator_set : public deallocator_set
255 {
256 standard_deallocator_set (malloc_state_machine *sm,
257 const char *name,
258 enum wording wording);
259
260 bool contains_p (const deallocator *d) const final override;
261 const deallocator *maybe_get_single () const final override;
262 void dump_to_pp (pretty_printer *pp) const final override;
263
264 standard_deallocator m_deallocator;
265 };
266
267 /* Traits class for ensuring uniqueness of deallocator_sets within
268 malloc_state_machine. */
269
270 struct deallocator_set_map_traits
271 {
272 typedef custom_deallocator_set::key_t key_type;
273 typedef custom_deallocator_set *value_type;
274 typedef custom_deallocator_set *compare_type;
275
276 static inline hashval_t hash (const key_type &k)
277 {
278 gcc_assert (k != NULL);
279 gcc_assert (k != reinterpret_cast<key_type> (1));
280
281 hashval_t result = 0;
282 unsigned i;
283 const deallocator *d;
284 FOR_EACH_VEC_ELT (*k, i, d)
285 result ^= d->hash ();
286 return result;
287 }
288 static inline bool equal_keys (const key_type &k1, const key_type &k2)
289 {
290 if (k1->length () != k2->length ())
291 return false;
292
293 for (unsigned i = 0; i < k1->length (); i++)
294 if ((*k1)[i] != (*k2)[i])
295 return false;
296
297 return true;
298 }
299 template <typename T>
300 static inline void remove (T &)
301 {
302 /* empty; the nodes are handled elsewhere. */
303 }
304 template <typename T>
305 static inline void mark_deleted (T &entry)
306 {
307 entry.m_key = reinterpret_cast<key_type> (1);
308 }
309 template <typename T>
310 static inline void mark_empty (T &entry)
311 {
312 entry.m_key = NULL;
313 }
314 template <typename T>
315 static inline bool is_deleted (const T &entry)
316 {
317 return entry.m_key == reinterpret_cast<key_type> (1);
318 }
319 template <typename T>
320 static inline bool is_empty (const T &entry)
321 {
322 return entry.m_key == NULL;
323 }
324 static const bool empty_zero_p = false;
325 };
326
327 /* A state machine for detecting misuses of the malloc/free API.
328
329 See sm-malloc.dot for an overview (keep this in-sync with that file). */
330
331 class malloc_state_machine : public state_machine
332 {
333 public:
334 typedef allocation_state custom_data_t;
335
336 malloc_state_machine (logger *logger);
337 ~malloc_state_machine ();
338
339 state_t
340 add_state (const char *name, enum resource_state rs,
341 const deallocator_set *deallocators,
342 const deallocator *deallocator);
343
344 bool inherited_state_p () const final override { return false; }
345
346 state_machine::state_t
347 get_default_state (const svalue *sval) const final override
348 {
349 if (tree cst = sval->maybe_get_constant ())
350 {
351 if (zerop (cst))
352 return m_null;
353 }
354 if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
355 {
356 const region *reg = ptr->get_pointee ();
357 switch (reg->get_memory_space ())
358 {
359 default:
360 break;
361 case MEMSPACE_CODE:
362 case MEMSPACE_GLOBALS:
363 case MEMSPACE_STACK:
364 case MEMSPACE_READONLY_DATA:
365 return m_non_heap;
366 }
367 }
368 return m_start;
369 }
370
371 bool on_stmt (sm_context *sm_ctxt,
372 const supernode *node,
373 const gimple *stmt) const final override;
374
375 void on_phi (sm_context *sm_ctxt,
376 const supernode *node,
377 const gphi *phi,
378 tree rhs) const final override;
379
380 void on_condition (sm_context *sm_ctxt,
381 const supernode *node,
382 const gimple *stmt,
383 const svalue *lhs,
384 enum tree_code op,
385 const svalue *rhs) const final override;
386
387 bool can_purge_p (state_t s) const final override;
388 std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;
389
390 bool reset_when_passed_to_unknown_fn_p (state_t s,
391 bool is_mutable) const final override;
392
393 static bool unaffected_by_call_p (tree fndecl);
394
395 void on_realloc_with_move (region_model *model,
396 sm_state_map *smap,
397 const svalue *old_ptr_sval,
398 const svalue *new_ptr_sval,
399 const extrinsic_state &ext_state) const;
400
401 standard_deallocator_set m_free;
402 standard_deallocator_set m_scalar_delete;
403 standard_deallocator_set m_vector_delete;
404
405 standard_deallocator m_realloc;
406
407 /* States that are independent of API. */
408
409 /* State for a pointer that's known to be NULL. */
410 state_t m_null;
411
412 /* State for a pointer that's known to not be on the heap (e.g. to a local
413 or global). */
414 state_t m_non_heap; // TODO: or should this be a different state machine?
415 // or do we need child values etc?
416
417 /* Stop state, for pointers we don't want to track any more. */
418 state_t m_stop;
419
420 private:
421 const custom_deallocator_set *
422 get_or_create_custom_deallocator_set (tree allocator_fndecl);
423 custom_deallocator_set *
424 maybe_create_custom_deallocator_set (tree allocator_fndecl);
425 const deallocator *
426 get_or_create_deallocator (tree deallocator_fndecl);
427
428 void on_allocator_call (sm_context *sm_ctxt,
429 const gcall *call,
430 const deallocator_set *deallocators,
431 bool returns_nonnull = false) const;
432 void handle_free_of_non_heap (sm_context *sm_ctxt,
433 const supernode *node,
434 const gcall *call,
435 tree arg,
436 const deallocator *d) const;
437 void on_deallocator_call (sm_context *sm_ctxt,
438 const supernode *node,
439 const gcall *call,
440 const deallocator *d,
441 unsigned argno) const;
442 void on_realloc_call (sm_context *sm_ctxt,
443 const supernode *node,
444 const gcall *call) const;
445 void on_zero_assignment (sm_context *sm_ctxt,
446 const gimple *stmt,
447 tree lhs) const;
448
449 /* A map for consolidating deallocators so that they are
450 unique per deallocator FUNCTION_DECL. */
451 typedef hash_map<tree, deallocator *> deallocator_map_t;
452 deallocator_map_t m_deallocator_map;
453
454 /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *. */
455 typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
456 deallocator_set_cache_t m_custom_deallocator_set_cache;
457
458 /* A map for consolidating custom_deallocator_set instances. */
459 typedef hash_map<custom_deallocator_set::key_t,
460 custom_deallocator_set *,
461 deallocator_set_map_traits> custom_deallocator_set_map_t;
462 custom_deallocator_set_map_t m_custom_deallocator_set_map;
463
464 /* Record of dynamically-allocated objects, for cleanup. */
465 auto_vec <custom_deallocator_set *> m_dynamic_sets;
466 auto_vec <custom_deallocator *> m_dynamic_deallocators;
467 };
468
469 /* struct deallocator. */
470
471 deallocator::deallocator (malloc_state_machine *sm,
472 const char *name,
473 enum wording wording)
474 : m_name (name),
475 m_wording (wording),
476 m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
477 {
478 }
479
480 hashval_t
481 deallocator::hash () const
482 {
483 return (hashval_t)m_freed->get_id ();
484 }
485
486 void
487 deallocator::dump_to_pp (pretty_printer *pp) const
488 {
489 pp_printf (pp, "%qs", m_name);
490 }
491
492 int
493 deallocator::cmp (const deallocator *a, const deallocator *b)
494 {
495 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
496 }
497
498 int
499 deallocator::cmp_ptr_ptr (const void *a, const void *b)
500 {
501 return cmp (*(const deallocator * const *)a,
502 *(const deallocator * const *)b);
503 }
504
505
506 /* struct standard_deallocator : public deallocator. */
507
508 standard_deallocator::standard_deallocator (malloc_state_machine *sm,
509 const char *name,
510 enum wording wording)
511 : deallocator (sm, name, wording)
512 {
513 }
514
515 /* struct deallocator_set. */
516
517 deallocator_set::deallocator_set (malloc_state_machine *sm,
518 enum wording wording)
519 : m_wording (wording),
520 m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
521 m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
522 {
523 }
524
525 /* Dump a description of this deallocator_set to stderr. */
526
527 DEBUG_FUNCTION void
528 deallocator_set::dump () const
529 {
530 pretty_printer pp;
531 pp_show_color (&pp) = pp_show_color (global_dc->printer);
532 pp.buffer->stream = stderr;
533 dump_to_pp (&pp);
534 pp_newline (&pp);
535 pp_flush (&pp);
536 }
537
538 /* struct custom_deallocator_set : public deallocator_set. */
539
540 custom_deallocator_set::
541 custom_deallocator_set (malloc_state_machine *sm,
542 const auto_vec <const deallocator *> *vec,
543 enum wording wording)
544 : deallocator_set (sm, wording),
545 m_deallocator_vec (vec->length ())
546 {
547 unsigned i;
548 const deallocator *d;
549 FOR_EACH_VEC_ELT (*vec, i, d)
550 m_deallocator_vec.safe_push (d);
551 }
552
553 bool
554 custom_deallocator_set::contains_p (const deallocator *d) const
555 {
556 unsigned i;
557 const deallocator *cd;
558 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
559 if (cd == d)
560 return true;
561 return false;
562 }
563
564 const deallocator *
565 custom_deallocator_set::maybe_get_single () const
566 {
567 if (m_deallocator_vec.length () == 1)
568 return m_deallocator_vec[0];
569 return NULL;
570 }
571
572 void
573 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
574 {
575 pp_character (pp, '{');
576 unsigned i;
577 const deallocator *d;
578 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
579 {
580 if (i > 0)
581 pp_string (pp, ", ");
582 d->dump_to_pp (pp);
583 }
584 pp_character (pp, '}');
585 }
586
587 /* struct standard_deallocator_set : public deallocator_set. */
588
589 standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
590 const char *name,
591 enum wording wording)
592 : deallocator_set (sm, wording),
593 m_deallocator (sm, name, wording)
594 {
595 }
596
597 bool
598 standard_deallocator_set::contains_p (const deallocator *d) const
599 {
600 return d == &m_deallocator;
601 }
602
603 const deallocator *
604 standard_deallocator_set::maybe_get_single () const
605 {
606 return &m_deallocator;
607 }
608
609 void
610 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
611 {
612 pp_character (pp, '{');
613 pp_string (pp, m_deallocator.m_name);
614 pp_character (pp, '}');
615 }
616
617 /* Return STATE cast to the custom state subclass, or NULL for the start state.
618 Everything should be an allocation_state apart from the start state. */
619
620 static const allocation_state *
621 dyn_cast_allocation_state (state_machine::state_t state)
622 {
623 if (state->get_id () == 0)
624 return NULL;
625 return static_cast <const allocation_state *> (state);
626 }
627
628 /* Return STATE cast to the custom state subclass, for a state that is
629 already known not to be the start state. */
630
631 static const allocation_state *
632 as_a_allocation_state (state_machine::state_t state)
633 {
634 gcc_assert (state->get_id () != 0);
635 return static_cast <const allocation_state *> (state);
636 }
637
638 /* Get the resource_state for STATE. */
639
640 static enum resource_state
641 get_rs (state_machine::state_t state)
642 {
643 if (const allocation_state *astate = dyn_cast_allocation_state (state))
644 return astate->m_rs;
645 else
646 return RS_START;
647 }
648
649 /* Return true if STATE is the start state. */
650
651 static bool
652 start_p (state_machine::state_t state)
653 {
654 return get_rs (state) == RS_START;
655 }
656
657 /* Return true if STATE is an unchecked result from an allocator. */
658
659 static bool
660 unchecked_p (state_machine::state_t state)
661 {
662 return get_rs (state) == RS_UNCHECKED;
663 }
664
665 /* Return true if STATE is a non-null result from an allocator. */
666
667 static bool
668 nonnull_p (state_machine::state_t state)
669 {
670 return get_rs (state) == RS_NONNULL;
671 }
672
673 /* Return true if STATE is a value that has been passed to a deallocator. */
674
675 static bool
676 freed_p (state_machine::state_t state)
677 {
678 return get_rs (state) == RS_FREED;
679 }
680
681 /* Class for diagnostics relating to malloc_state_machine. */
682
683 class malloc_diagnostic : public pending_diagnostic
684 {
685 public:
686 malloc_diagnostic (const malloc_state_machine &sm, tree arg)
687 : m_sm (sm), m_arg (arg)
688 {}
689
690 bool subclass_equal_p (const pending_diagnostic &base_other) const override
691 {
692 return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
693 }
694
695 label_text describe_state_change (const evdesc::state_change &change)
696 override
697 {
698 if (change.m_old_state == m_sm.get_start_state ()
699 && unchecked_p (change.m_new_state))
700 // TODO: verify that it's the allocation stmt, not a copy
701 return label_text::borrow ("allocated here");
702 if (unchecked_p (change.m_old_state)
703 && nonnull_p (change.m_new_state))
704 {
705 if (change.m_expr)
706 return change.formatted_print ("assuming %qE is non-NULL",
707 change.m_expr);
708 else
709 return change.formatted_print ("assuming %qs is non-NULL",
710 "<unknown>");
711 }
712 if (change.m_new_state == m_sm.m_null)
713 {
714 if (unchecked_p (change.m_old_state))
715 {
716 if (change.m_expr)
717 return change.formatted_print ("assuming %qE is NULL",
718 change.m_expr);
719 else
720 return change.formatted_print ("assuming %qs is NULL",
721 "<unknown>");
722 }
723 else
724 {
725 if (change.m_expr)
726 return change.formatted_print ("%qE is NULL",
727 change.m_expr);
728 else
729 return change.formatted_print ("%qs is NULL",
730 "<unknown>");
731 }
732 }
733
734 return label_text ();
735 }
736
737 diagnostic_event::meaning
738 get_meaning_for_state_change (const evdesc::state_change &change)
739 const final override
740 {
741 if (change.m_old_state == m_sm.get_start_state ()
742 && unchecked_p (change.m_new_state))
743 return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
744 diagnostic_event::NOUN_memory);
745 if (freed_p (change.m_new_state))
746 return diagnostic_event::meaning (diagnostic_event::VERB_release,
747 diagnostic_event::NOUN_memory);
748 return diagnostic_event::meaning ();
749 }
750
751 protected:
752 const malloc_state_machine &m_sm;
753 tree m_arg;
754 };
755
756 /* Concrete subclass for reporting mismatching allocator/deallocator
757 diagnostics. */
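/* An illustrative C++ testcase for this diagnostic (assuming the usual
   <cstdlib> declarations of malloc/free):

     #include <cstdlib>
     void test ()
     {
       int *p = static_cast<int *> (malloc (sizeof (int)));
       delete p;   // expected: -Wanalyzer-mismatching-deallocation
     }
*/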
758
759 class mismatching_deallocation : public malloc_diagnostic
760 {
761 public:
762 mismatching_deallocation (const malloc_state_machine &sm, tree arg,
763 const deallocator_set *expected_deallocators,
764 const deallocator *actual_dealloc)
765 : malloc_diagnostic (sm, arg),
766 m_expected_deallocators (expected_deallocators),
767 m_actual_dealloc (actual_dealloc)
768 {}
769
770 const char *get_kind () const final override
771 {
772 return "mismatching_deallocation";
773 }
774
775 int get_controlling_option () const final override
776 {
777 return OPT_Wanalyzer_mismatching_deallocation;
778 }
779
780 bool emit (rich_location *rich_loc) final override
781 {
782 auto_diagnostic_group d;
783 diagnostic_metadata m;
784 m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines. */
785 if (const deallocator *expected_dealloc
786 = m_expected_deallocators->maybe_get_single ())
787 return warning_meta (rich_loc, m, get_controlling_option (),
788 "%qE should have been deallocated with %qs"
789 " but was deallocated with %qs",
790 m_arg, expected_dealloc->m_name,
791 m_actual_dealloc->m_name);
792 else
793 return warning_meta (rich_loc, m, get_controlling_option (),
794 "%qs called on %qE returned from a mismatched"
795 " allocation function",
796 m_actual_dealloc->m_name, m_arg);
797 }
798
799 label_text describe_state_change (const evdesc::state_change &change)
800 final override
801 {
802 if (unchecked_p (change.m_new_state))
803 {
804 m_alloc_event = change.m_event_id;
805 if (const deallocator *expected_dealloc
806 = m_expected_deallocators->maybe_get_single ())
807 return change.formatted_print ("allocated here"
808 " (expects deallocation with %qs)",
809 expected_dealloc->m_name);
810 else
811 return change.formatted_print ("allocated here");
812 }
813 return malloc_diagnostic::describe_state_change (change);
814 }
815
816 label_text describe_final_event (const evdesc::final_event &ev) final override
817 {
818 if (m_alloc_event.known_p ())
819 {
820 if (const deallocator *expected_dealloc
821 = m_expected_deallocators->maybe_get_single ())
822 return ev.formatted_print
823 ("deallocated with %qs here;"
824 " allocation at %@ expects deallocation with %qs",
825 m_actual_dealloc->m_name, &m_alloc_event,
826 expected_dealloc->m_name);
827 else
828 return ev.formatted_print
829 ("deallocated with %qs here;"
830 " allocated at %@",
831 m_actual_dealloc->m_name, &m_alloc_event);
832 }
833 return ev.formatted_print ("deallocated with %qs here",
834 m_actual_dealloc->m_name);
835 }
836
837 private:
838 diagnostic_event_id_t m_alloc_event;
839 const deallocator_set *m_expected_deallocators;
840 const deallocator *m_actual_dealloc;
841 };
842
843 /* Concrete subclass for reporting double-free diagnostics. */
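/* An illustrative C testcase for this diagnostic:

     #include <stdlib.h>
     void test (void *p)
     {
       free (p);
       free (p);   // expected: -Wanalyzer-double-free
     }
*/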
844
845 class double_free : public malloc_diagnostic
846 {
847 public:
848 double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
849 : malloc_diagnostic (sm, arg), m_funcname (funcname)
850 {}
851
852 const char *get_kind () const final override { return "double_free"; }
853
854 int get_controlling_option () const final override
855 {
856 return OPT_Wanalyzer_double_free;
857 }
858
859 bool emit (rich_location *rich_loc) final override
860 {
861 auto_diagnostic_group d;
862 diagnostic_metadata m;
863 m.add_cwe (415); /* CWE-415: Double Free. */
864 return warning_meta (rich_loc, m, get_controlling_option (),
865 "double-%qs of %qE", m_funcname, m_arg);
866 }
867
868 label_text describe_state_change (const evdesc::state_change &change)
869 final override
870 {
871 if (freed_p (change.m_new_state))
872 {
873 m_first_free_event = change.m_event_id;
874 return change.formatted_print ("first %qs here", m_funcname);
875 }
876 return malloc_diagnostic::describe_state_change (change);
877 }
878
879 label_text describe_call_with_state (const evdesc::call_with_state &info)
880 final override
881 {
882 if (freed_p (info.m_state))
883 return info.formatted_print
884 ("passing freed pointer %qE in call to %qE from %qE",
885 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
886 return label_text ();
887 }
888
889 label_text describe_final_event (const evdesc::final_event &ev) final override
890 {
891 if (m_first_free_event.known_p ())
892 return ev.formatted_print ("second %qs here; first %qs was at %@",
893 m_funcname, m_funcname,
894 &m_first_free_event);
895 return ev.formatted_print ("second %qs here", m_funcname);
896 }
897
898 private:
899 diagnostic_event_id_t m_first_free_event;
900 const char *m_funcname;
901 };
902
903 /* Abstract subclass for describing possible bad uses of NULL.
904 Responsible for describing the call that could return NULL. */
905
906 class possible_null : public malloc_diagnostic
907 {
908 public:
909 possible_null (const malloc_state_machine &sm, tree arg)
910 : malloc_diagnostic (sm, arg)
911 {}
912
913 label_text describe_state_change (const evdesc::state_change &change)
914 final override
915 {
916 if (change.m_old_state == m_sm.get_start_state ()
917 && unchecked_p (change.m_new_state))
918 {
919 m_origin_of_unchecked_event = change.m_event_id;
920 return label_text::borrow ("this call could return NULL");
921 }
922 return malloc_diagnostic::describe_state_change (change);
923 }
924
925 label_text describe_return_of_state (const evdesc::return_of_state &info)
926 final override
927 {
928 if (unchecked_p (info.m_state))
929 return info.formatted_print ("possible return of NULL to %qE from %qE",
930 info.m_caller_fndecl, info.m_callee_fndecl);
931 return label_text ();
932 }
933
934 protected:
935 diagnostic_event_id_t m_origin_of_unchecked_event;
936 };
937
938 /* Concrete subclass for describing dereference of a possible NULL
939 value. */
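/* An illustrative C testcase: the allocation result is dereferenced
   while still in the "unchecked" state:

     #include <stdlib.h>
     void test (void)
     {
       int *p = malloc (sizeof (int));
       *p = 42;    // expected: -Wanalyzer-possible-null-dereference
       free (p);
     }
*/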
940
941 class possible_null_deref : public possible_null
942 {
943 public:
944 possible_null_deref (const malloc_state_machine &sm, tree arg)
945 : possible_null (sm, arg)
946 {}
947
948 const char *get_kind () const final override { return "possible_null_deref"; }
949
950 int get_controlling_option () const final override
951 {
952 return OPT_Wanalyzer_possible_null_dereference;
953 }
954
955 bool emit (rich_location *rich_loc) final override
956 {
957 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
958 diagnostic_metadata m;
959 m.add_cwe (690);
960 return warning_meta (rich_loc, m, get_controlling_option (),
961 "dereference of possibly-NULL %qE", m_arg);
962 }
963
964 label_text describe_final_event (const evdesc::final_event &ev) final override
965 {
966 if (m_origin_of_unchecked_event.known_p ())
967 return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
968 ev.m_expr,
969 &m_origin_of_unchecked_event);
970 else
971 return ev.formatted_print ("%qE could be NULL", ev.m_expr);
972 }
973
974 };
975
976 /* Return true if FNDECL is a C++ method. */
977
978 static bool
979 method_p (tree fndecl)
980 {
981 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
982 }
983
984 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
985 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
986 as called from cp_printer). */
987
988 static label_text
989 describe_argument_index (tree fndecl, int arg_idx)
990 {
991 if (method_p (fndecl))
992 if (arg_idx == 0)
993 return label_text::borrow ("'this'");
994 pretty_printer pp;
995 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
996 return label_text::take (xstrdup (pp_formatted_text (&pp)));
997 }
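/* For example (hypothetical declarations): for "void foo (int *p, char *q)",
   arg_idx 0 yields "1" and arg_idx 1 yields "2"; for a C++ member function
   "void C::set (int *p)", arg_idx 0 yields "'this'" and arg_idx 1
   yields "1".  */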
998
999 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
1000 Issue a note informing that the pertinent argument must be non-NULL. */
1001
1002 static void
1003 inform_nonnull_attribute (tree fndecl, int arg_idx)
1004 {
1005 label_text arg_desc = describe_argument_index (fndecl, arg_idx);
1006 inform (DECL_SOURCE_LOCATION (fndecl),
1007 "argument %s of %qD must be non-null",
1008 arg_desc.get (), fndecl);
1009 /* Ideally we would use the location of the parm and underline the
1010 attribute also - but we don't have the location_t values at this point
1011 in the middle-end.
1012 For reference, the C and C++ FEs have get_fndecl_argument_location. */
1013 }
1014
1015 /* Concrete subclass for describing passing a possibly-NULL value to a
1016 function marked with __attribute__((nonnull)). */
1017
1018 class possible_null_arg : public possible_null
1019 {
1020 public:
1021 possible_null_arg (const malloc_state_machine &sm, tree arg,
1022 tree fndecl, int arg_idx)
1023 : possible_null (sm, arg),
1024 m_fndecl (fndecl), m_arg_idx (arg_idx)
1025 {}
1026
1027 const char *get_kind () const final override { return "possible_null_arg"; }
1028
1029 bool subclass_equal_p (const pending_diagnostic &base_other)
1030 const final override
1031 {
1032 const possible_null_arg &sub_other
1033 = (const possible_null_arg &)base_other;
1034 return (same_tree_p (m_arg, sub_other.m_arg)
1035 && m_fndecl == sub_other.m_fndecl
1036 && m_arg_idx == sub_other.m_arg_idx);
1037 }
1038
1039 int get_controlling_option () const final override
1040 {
1041 return OPT_Wanalyzer_possible_null_argument;
1042 }
1043
1044 bool emit (rich_location *rich_loc) final override
1045 {
1046 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1047 auto_diagnostic_group d;
1048 diagnostic_metadata m;
1049 m.add_cwe (690);
1050 bool warned
1051 = warning_meta (rich_loc, m, get_controlling_option (),
1052 "use of possibly-NULL %qE where non-null expected",
1053 m_arg);
1054 if (warned)
1055 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1056 return warned;
1057 }
1058
1059 label_text describe_final_event (const evdesc::final_event &ev) final override
1060 {
1061 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1062 label_text result;
1063 if (m_origin_of_unchecked_event.known_p ())
1064 result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
1065 " where non-null expected",
1066 arg_desc.get (), ev.m_expr,
1067 &m_origin_of_unchecked_event);
1068 else
1069 result = ev.formatted_print ("argument %s (%qE) could be NULL"
1070 " where non-null expected",
1071 arg_desc.get (), ev.m_expr);
1072 return result;
1073 }
1074
1075 private:
1076 tree m_fndecl;
1077 int m_arg_idx;
1078 };
1079
1080 /* Concrete subclass for describing a dereference of a NULL value. */
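/* An illustrative C testcase:

     void test (void)
     {
       int *p = 0;   // on_zero_assignment puts p into the "null" state
       *p = 1;       // expected: -Wanalyzer-null-dereference
     }
*/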
1081
1082 class null_deref : public malloc_diagnostic
1083 {
1084 public:
1085 null_deref (const malloc_state_machine &sm, tree arg)
1086 : malloc_diagnostic (sm, arg) {}
1087
1088 const char *get_kind () const final override { return "null_deref"; }
1089
1090 int get_controlling_option () const final override
1091 {
1092 return OPT_Wanalyzer_null_dereference;
1093 }
1094
1095 bool emit (rich_location *rich_loc) final override
1096 {
1097 /* CWE-476: NULL Pointer Dereference. */
1098 diagnostic_metadata m;
1099 m.add_cwe (476);
1100 return warning_meta (rich_loc, m, get_controlling_option (),
1101 "dereference of NULL %qE", m_arg);
1102 }
1103
1104 label_text describe_return_of_state (const evdesc::return_of_state &info)
1105 final override
1106 {
1107 if (info.m_state == m_sm.m_null)
1108 return info.formatted_print ("return of NULL to %qE from %qE",
1109 info.m_caller_fndecl, info.m_callee_fndecl);
1110 return label_text ();
1111 }
1112
1113 label_text describe_final_event (const evdesc::final_event &ev) final override
1114 {
1115 return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
1116 }
1117 };
1118
1119 /* Concrete subclass for describing passing a NULL value to a
1120 function marked with __attribute__((nonnull)). */
1121
1122 class null_arg : public malloc_diagnostic
1123 {
1124 public:
1125 null_arg (const malloc_state_machine &sm, tree arg,
1126 tree fndecl, int arg_idx)
1127 : malloc_diagnostic (sm, arg),
1128 m_fndecl (fndecl), m_arg_idx (arg_idx)
1129 {}
1130
1131 const char *get_kind () const final override { return "null_arg"; }
1132
1133 bool subclass_equal_p (const pending_diagnostic &base_other)
1134 const final override
1135 {
1136 const null_arg &sub_other
1137 = (const null_arg &)base_other;
1138 return (same_tree_p (m_arg, sub_other.m_arg)
1139 && m_fndecl == sub_other.m_fndecl
1140 && m_arg_idx == sub_other.m_arg_idx);
1141 }
1142
1143 int get_controlling_option () const final override
1144 {
1145 return OPT_Wanalyzer_null_argument;
1146 }
1147
1148 bool emit (rich_location *rich_loc) final override
1149 {
1150 /* CWE-476: NULL Pointer Dereference. */
1151 auto_diagnostic_group d;
1152 diagnostic_metadata m;
1153 m.add_cwe (476);
1154
1155 bool warned;
1156 if (zerop (m_arg))
1157 warned = warning_meta (rich_loc, m, get_controlling_option (),
1158 "use of NULL where non-null expected");
1159 else
1160 warned = warning_meta (rich_loc, m, get_controlling_option (),
1161 "use of NULL %qE where non-null expected",
1162 m_arg);
1163 if (warned)
1164 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1165 return warned;
1166 }
1167
1168 label_text describe_final_event (const evdesc::final_event &ev) final override
1169 {
1170 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1171 label_text result;
1172 if (zerop (ev.m_expr))
1173 result = ev.formatted_print ("argument %s NULL where non-null expected",
1174 arg_desc.get ());
1175 else
1176 result = ev.formatted_print ("argument %s (%qE) NULL"
1177 " where non-null expected",
1178 arg_desc.get (), ev.m_expr);
1179 return result;
1180 }
1181
1182 private:
1183 tree m_fndecl;
1184 int m_arg_idx;
1185 };
1186
1187 class use_after_free : public malloc_diagnostic
1188 {
1189 public:
1190 use_after_free (const malloc_state_machine &sm, tree arg,
1191 const deallocator *deallocator)
1192 : malloc_diagnostic (sm, arg),
1193 m_deallocator (deallocator)
1194 {
1195 gcc_assert (deallocator);
1196 }
1197
1198 const char *get_kind () const final override { return "use_after_free"; }
1199
1200 int get_controlling_option () const final override
1201 {
1202 return OPT_Wanalyzer_use_after_free;
1203 }
1204
1205 bool emit (rich_location *rich_loc) final override
1206 {
1207 /* CWE-416: Use After Free. */
1208 diagnostic_metadata m;
1209 m.add_cwe (416);
1210 return warning_meta (rich_loc, m, get_controlling_option (),
1211 "use after %<%s%> of %qE",
1212 m_deallocator->m_name, m_arg);
1213 }
1214
1215 label_text describe_state_change (const evdesc::state_change &change)
1216 final override
1217 {
1218 if (freed_p (change.m_new_state))
1219 {
1220 m_free_event = change.m_event_id;
1221 switch (m_deallocator->m_wording)
1222 {
1223 default:
1224 case WORDING_REALLOCATED:
1225 gcc_unreachable ();
1226 case WORDING_FREED:
1227 return label_text::borrow ("freed here");
1228 case WORDING_DELETED:
1229 return label_text::borrow ("deleted here");
1230 case WORDING_DEALLOCATED:
1231 return label_text::borrow ("deallocated here");
1232 }
1233 }
1234 return malloc_diagnostic::describe_state_change (change);
1235 }
1236
1237 label_text describe_final_event (const evdesc::final_event &ev) final override
1238 {
1239 const char *funcname = m_deallocator->m_name;
1240 if (m_free_event.known_p ())
1241 switch (m_deallocator->m_wording)
1242 {
1243 default:
1244 case WORDING_REALLOCATED:
1245 gcc_unreachable ();
1246 case WORDING_FREED:
1247 return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
1248 funcname, ev.m_expr, &m_free_event);
1249 case WORDING_DELETED:
1250 return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
1251 funcname, ev.m_expr, &m_free_event);
1252 case WORDING_DEALLOCATED:
1253 return ev.formatted_print ("use after %<%s%> of %qE;"
1254 " deallocated at %@",
1255 funcname, ev.m_expr, &m_free_event);
1256 }
1257 else
1258 return ev.formatted_print ("use after %<%s%> of %qE",
1259 funcname, ev.m_expr);
1260 }
1261
1262 /* Implementation of pending_diagnostic::supercedes_p for
1263 use_after_free.
1264
1265 We want use-after-free to supercede use-of-uninitialized-value,
1266 so that if we have these at the same stmt, we don't emit
1267 a use-of-uninitialized, just the use-after-free.
1268 (this is because we fully purge information about freed
1269 buffers when we free them to avoid state explosions, so
1270 that if they are accessed after the free, it looks like
1271 they are uninitialized). */
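  /* An illustrative C testcase for the rationale above:

       #include <stdlib.h>
       int test (int *p)
       {
         free (p);
         return *p;   // report only -Wanalyzer-use-after-free here, not an
                      // additional use-of-uninitialized-value diagnostic
       }
  */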
1272
1273 bool supercedes_p (const pending_diagnostic &other) const final override
1274 {
1275 if (other.use_of_uninit_p ())
1276 return true;
1277
1278 return false;
1279 }
1280
1281 private:
1282 diagnostic_event_id_t m_free_event;
1283 const deallocator *m_deallocator;
1284 };
1285
1286 class malloc_leak : public malloc_diagnostic
1287 {
1288 public:
1289 malloc_leak (const malloc_state_machine &sm, tree arg)
1290 : malloc_diagnostic (sm, arg) {}
1291
1292 const char *get_kind () const final override { return "malloc_leak"; }
1293
1294 int get_controlling_option () const final override
1295 {
1296 return OPT_Wanalyzer_malloc_leak;
1297 }
1298
1299 bool emit (rich_location *rich_loc) final override
1300 {
1301 /* "CWE-401: Missing Release of Memory after Effective Lifetime". */
1302 diagnostic_metadata m;
1303 m.add_cwe (401);
1304 if (m_arg)
1305 return warning_meta (rich_loc, m, get_controlling_option (),
1306 "leak of %qE", m_arg);
1307 else
1308 return warning_meta (rich_loc, m, get_controlling_option (),
1309 "leak of %qs", "<unknown>");
1310 }
1311
1312 label_text describe_state_change (const evdesc::state_change &change)
1313 final override
1314 {
1315 if (unchecked_p (change.m_new_state)
1316 || (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
1317 {
1318 m_alloc_event = change.m_event_id;
1319 return label_text::borrow ("allocated here");
1320 }
1321 return malloc_diagnostic::describe_state_change (change);
1322 }
1323
1324 label_text describe_final_event (const evdesc::final_event &ev) final override
1325 {
1326 if (ev.m_expr)
1327 {
1328 if (m_alloc_event.known_p ())
1329 return ev.formatted_print ("%qE leaks here; was allocated at %@",
1330 ev.m_expr, &m_alloc_event);
1331 else
1332 return ev.formatted_print ("%qE leaks here", ev.m_expr);
1333 }
1334 else
1335 {
1336 if (m_alloc_event.known_p ())
1337 return ev.formatted_print ("%qs leaks here; was allocated at %@",
1338 "<unknown>", &m_alloc_event);
1339 else
1340 return ev.formatted_print ("%qs leaks here", "<unknown>");
1341 }
1342 }
1343
1344 private:
1345 diagnostic_event_id_t m_alloc_event;
1346 };
1347
1348 class free_of_non_heap : public malloc_diagnostic
1349 {
1350 public:
1351 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1352 const region *freed_reg,
1353 const char *funcname)
1354 : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
1355 {
1356 }
1357
1358 const char *get_kind () const final override { return "free_of_non_heap"; }
1359
1360 bool subclass_equal_p (const pending_diagnostic &base_other) const
1361 final override
1362 {
1363 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1364 return (same_tree_p (m_arg, other.m_arg)
1365 && m_freed_reg == other.m_freed_reg);
1366 }
1367
1368 int get_controlling_option () const final override
1369 {
1370 return OPT_Wanalyzer_free_of_non_heap;
1371 }
1372
1373 bool emit (rich_location *rich_loc) final override
1374 {
1375 auto_diagnostic_group d;
1376 diagnostic_metadata m;
1377 m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1378 switch (get_memory_space ())
1379 {
1380 default:
1381 case MEMSPACE_HEAP:
1382 gcc_unreachable ();
1383 case MEMSPACE_UNKNOWN:
1384 case MEMSPACE_CODE:
1385 case MEMSPACE_GLOBALS:
1386 case MEMSPACE_READONLY_DATA:
1387 return warning_meta (rich_loc, m, get_controlling_option (),
1388 "%<%s%> of %qE which points to memory"
1389 " not on the heap",
1390 m_funcname, m_arg);
1391 break;
1392 case MEMSPACE_STACK:
1393 return warning_meta (rich_loc, m, get_controlling_option (),
1394 "%<%s%> of %qE which points to memory"
1395 " on the stack",
1396 m_funcname, m_arg);
1397 break;
1398 }
1399 }
1400
1401 label_text describe_state_change (const evdesc::state_change &)
1402 final override
1403 {
1404 return label_text::borrow ("pointer is from here");
1405 }
1406
1407 label_text describe_final_event (const evdesc::final_event &ev) final override
1408 {
1409 return ev.formatted_print ("call to %qs here", m_funcname);
1410 }
1411
1412 void mark_interesting_stuff (interesting_t *interest) final override
1413 {
1414 if (m_freed_reg)
1415 interest->add_region_creation (m_freed_reg);
1416 }
1417
1418 private:
1419 enum memory_space get_memory_space () const
1420 {
1421 if (m_freed_reg)
1422 return m_freed_reg->get_memory_space ();
1423 else
1424 return MEMSPACE_UNKNOWN;
1425 }
1426
1427 const region *m_freed_reg;
1428 const char *m_funcname;
1429 };
1430
1431 /* struct allocation_state : public state_machine::state. */
1432
1433 /* Implementation of state_machine::state::dump_to_pp vfunc
1434 for allocation_state: append the API that this allocation is
1435 associated with. */
1436
1437 void
1438 allocation_state::dump_to_pp (pretty_printer *pp) const
1439 {
1440 state_machine::state::dump_to_pp (pp);
1441 if (m_deallocators)
1442 {
1443 pp_string (pp, " (");
1444 m_deallocators->dump_to_pp (pp);
1445 pp_character (pp, ')');
1446 }
1447 }
1448
1449 /* Given an allocation_state for a deallocator_set, get the "nonnull" state
1450 for the corresponding allocator(s). */
1451
1452 const allocation_state *
1453 allocation_state::get_nonnull () const
1454 {
1455 gcc_assert (m_deallocators);
1456 return as_a_allocation_state (m_deallocators->m_nonnull);
1457 }
1458
1459 /* malloc_state_machine's ctor. */
1460
1461 malloc_state_machine::malloc_state_machine (logger *logger)
1462 : state_machine ("malloc", logger),
1463 m_free (this, "free", WORDING_FREED),
1464 m_scalar_delete (this, "delete", WORDING_DELETED),
1465 m_vector_delete (this, "delete[]", WORDING_DELETED),
1466 m_realloc (this, "realloc", WORDING_REALLOCATED)
1467 {
1468 gcc_assert (m_start->get_id () == 0);
1469 m_null = add_state ("null", RS_FREED, NULL, NULL);
1470 m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
1471 m_stop = add_state ("stop", RS_STOP, NULL, NULL);
1472 }
1473
1474 malloc_state_machine::~malloc_state_machine ()
1475 {
1476 unsigned i;
1477 custom_deallocator_set *set;
1478 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1479 delete set;
1480 custom_deallocator *d;
1481 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1482 delete d;
1483 }
1484
1485 state_machine::state_t
1486 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1487 const deallocator_set *deallocators,
1488 const deallocator *deallocator)
1489 {
1490 return add_custom_state (new allocation_state (name, alloc_state_id (),
1491 rs, deallocators,
1492 deallocator));
1493 }
1494
1495 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1496 return a custom_deallocator_set for them, consolidating them
1497 to ensure uniqueness of the sets.
1498
1499 Return NULL if it has no such attributes. */
1500
1501 const custom_deallocator_set *
1502 malloc_state_machine::
1503 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1504 {
1505 /* Early rejection of decls without attributes. */
1506 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1507 if (!attrs)
1508 return NULL;
1509
1510 /* Otherwise, call maybe_create_custom_deallocator_set,
1511 memoizing the result. */
1512 if (custom_deallocator_set **slot
1513 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1514 return *slot;
1515 custom_deallocator_set *set
1516 = maybe_create_custom_deallocator_set (allocator_fndecl);
1517 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1518 return set;
1519 }
1520
1521 /* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1522 look for any "__attribute__((malloc(FOO)))" and return a
1523 custom_deallocator_set for them, consolidating them
1524 to ensure uniqueness of the sets.
1525
1526 Return NULL if it has no such attributes.
1527
1528 Subroutine of get_or_create_custom_deallocator_set which
1529 memoizes the result. */
1530
1531 custom_deallocator_set *
1532 malloc_state_machine::
1533 maybe_create_custom_deallocator_set (tree allocator_fndecl)
1534 {
1535 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1536 gcc_assert (attrs);
1537
1538 /* Look for instances of __attribute__((malloc(FOO))). */
1539 auto_vec<const deallocator *> deallocator_vec;
1540 for (tree allocs = attrs;
1541 (allocs = lookup_attribute ("malloc", allocs));
1542 allocs = TREE_CHAIN (allocs))
1543 {
1544 tree args = TREE_VALUE (allocs);
1545 if (!args)
1546 continue;
1547 if (TREE_VALUE (args))
1548 {
1549 const deallocator *d
1550 = get_or_create_deallocator (TREE_VALUE (args));
1551 deallocator_vec.safe_push (d);
1552 }
1553 }
1554
1555 /* If there weren't any deallocators, bail. */
1556 if (deallocator_vec.length () == 0)
1557 return NULL;
1558
1559 /* Consolidate, so that we reuse existing deallocator_set
1560 instances. */
1561 deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
1562 custom_deallocator_set **slot
1563 = m_custom_deallocator_set_map.get (&deallocator_vec);
1564 if (slot)
1565 return *slot;
1566 custom_deallocator_set *set
1567 = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
1568 m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
1569 m_dynamic_sets.safe_push (set);
1570 return set;
1571 }
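/* For example (hypothetical declarations), given

     void release_a (void *);
     void release_b (void *);
     __attribute__ ((malloc (release_a), malloc (release_b)))
     void *acquire (void);

   this consolidates {release_a, release_b} into a single
   custom_deallocator_set; another allocator declared with the same pair
   of attributes reuses the same set instance.  */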
1572
1573 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1574
1575 const deallocator *
1576 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1577 {
1578 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1579 if (slot)
1580 return *slot;
1581
1582 /* Reuse "free". */
1583 deallocator *d;
1584 if (is_named_call_p (deallocator_fndecl, "free")
1585 || is_std_named_call_p (deallocator_fndecl, "free")
1586 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1587 d = &m_free.m_deallocator;
1588 else
1589 {
1590 custom_deallocator *cd
1591 = new custom_deallocator (this, deallocator_fndecl,
1592 WORDING_DEALLOCATED);
1593 m_dynamic_deallocators.safe_push (cd);
1594 d = cd;
1595 }
1596 m_deallocator_map.put (deallocator_fndecl, d);
1597 return d;
1598 }
1599
1600 /* Return true if FNDECL (for a call CALL) is a known allocator, either
1601 recognized by name or as a malloc-family builtin. */
1602
1603 static bool
1604 known_allocator_p (const_tree fndecl, const gcall *call)
1605 {
1606 /* Either it is a function we know by name and number of arguments... */
1607 if (is_named_call_p (fndecl, "malloc", call, 1)
1608 || is_named_call_p (fndecl, "calloc", call, 2)
1609 || is_std_named_call_p (fndecl, "malloc", call, 1)
1610 || is_std_named_call_p (fndecl, "calloc", call, 2)
1611 || is_named_call_p (fndecl, "strdup", call, 1)
1612 || is_named_call_p (fndecl, "strndup", call, 2))
1613 return true;
1614
1615 /* ... or it is a builtin allocator that allocates objects freed with
1616 __builtin_free. */
1617 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1618 switch (DECL_FUNCTION_CODE (fndecl))
1619 {
1620 case BUILT_IN_MALLOC:
1621 case BUILT_IN_CALLOC:
1622 case BUILT_IN_STRDUP:
1623 case BUILT_IN_STRNDUP:
1624 return true;
1625 default:
1626 break;
1627 }
1628
1629 return false;
1630 }
1631
1632 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1633
1634 bool
1635 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1636 const supernode *node,
1637 const gimple *stmt) const
1638 {
1639 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1640 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1641 {
1642 if (known_allocator_p (callee_fndecl, call))
1643 {
1644 on_allocator_call (sm_ctxt, call, &m_free);
1645 return true;
1646 }
1647
1648 if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1649 on_allocator_call (sm_ctxt, call, &m_scalar_delete);
1650 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1651 on_allocator_call (sm_ctxt, call, &m_vector_delete);
1652 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1653 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1654 {
1655 on_deallocator_call (sm_ctxt, node, call,
1656 &m_scalar_delete.m_deallocator, 0);
1657 return true;
1658 }
1659 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1660 {
1661 on_deallocator_call (sm_ctxt, node, call,
1662 &m_vector_delete.m_deallocator, 0);
1663 return true;
1664 }
1665
1666 if (is_named_call_p (callee_fndecl, "alloca", call, 1)
1667 || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
1668 {
1669 tree lhs = gimple_call_lhs (call);
1670 if (lhs)
1671 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1672 return true;
1673 }
1674
1675 if (is_named_call_p (callee_fndecl, "free", call, 1)
1676 || is_std_named_call_p (callee_fndecl, "free", call, 1)
1677 || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
1678 {
1679 on_deallocator_call (sm_ctxt, node, call,
1680 &m_free.m_deallocator, 0);
1681 return true;
1682 }
1683
1684 if (is_named_call_p (callee_fndecl, "realloc", call, 2)
1685 || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
1686 {
1687 on_realloc_call (sm_ctxt, node, call);
1688 return true;
1689 }
1690
1691 if (unaffected_by_call_p (callee_fndecl))
1692 return true;
1693
1694 /* Cast away const-ness for cache-like operations. */
1695 malloc_state_machine *mutable_this
1696 = const_cast <malloc_state_machine *> (this);
1697
1698 /* Handle "__attribute__((malloc(FOO)))". */
1699 if (const deallocator_set *deallocators
1700 = mutable_this->get_or_create_custom_deallocator_set
1701 (callee_fndecl))
1702 {
1703 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
1704 bool returns_nonnull
1705 = lookup_attribute ("returns_nonnull", attrs);
1706 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
1707 }
1708
1709 /* Handle "__attribute__((nonnull))". */
1710 {
1711 tree fntype = TREE_TYPE (callee_fndecl);
1712 bitmap nonnull_args = get_nonnull_args (fntype);
1713 if (nonnull_args)
1714 {
1715 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1716 {
1717 tree arg = gimple_call_arg (stmt, i);
1718 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
1719 continue;
1720 /* An empty nonnull_args bitmap means all pointer arguments must be
1721 non-NULL; otherwise only the listed arguments must be. */
1722 if (bitmap_empty_p (nonnull_args)
1723 || bitmap_bit_p (nonnull_args, i))
1724 {
1725 state_t state = sm_ctxt->get_state (stmt, arg);
1726 /* Can't use a switch as the states are non-const. */
1727 if (unchecked_p (state))
1728 {
1729 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1730 sm_ctxt->warn (node, stmt, arg,
1731 make_unique<possible_null_arg>
1732 (*this, diag_arg, callee_fndecl, i));
1733 const allocation_state *astate
1734 = as_a_allocation_state (state);
1735 sm_ctxt->set_next_state (stmt, arg,
1736 astate->get_nonnull ());
1737 }
1738 else if (state == m_null)
1739 {
1740 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1741 sm_ctxt->warn (node, stmt, arg,
1742 make_unique<null_arg>
1743 (*this, diag_arg, callee_fndecl, i));
1744 sm_ctxt->set_next_state (stmt, arg, m_stop);
1745 }
1746 }
1747 }
1748 BITMAP_FREE (nonnull_args);
1749 }
1750 }
1751
1752 /* Check for this after nonnull, so that if we have both
1753 then we transition to "freed", rather than "checked". */
1754 unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
1755 if (dealloc_argno != UINT_MAX)
1756 {
1757 const deallocator *d
1758 = mutable_this->get_or_create_deallocator (callee_fndecl);
1759 on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
1760 }
1761 }
1762
1763 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
1764 if (any_pointer_p (lhs))
1765 on_zero_assignment (sm_ctxt, stmt, lhs);
1766
1767 /* Handle dereferences. */
1768 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
1769 {
1770 tree op = gimple_op (stmt, i);
1771 if (!op)
1772 continue;
1773 if (TREE_CODE (op) == COMPONENT_REF)
1774 op = TREE_OPERAND (op, 0);
1775
1776 if (TREE_CODE (op) == MEM_REF)
1777 {
1778 tree arg = TREE_OPERAND (op, 0);
1779
1780 state_t state = sm_ctxt->get_state (stmt, arg);
1781 if (unchecked_p (state))
1782 {
1783 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1784 sm_ctxt->warn (node, stmt, arg,
1785 make_unique<possible_null_deref> (*this,
1786 diag_arg));
1787 const allocation_state *astate = as_a_allocation_state (state);
1788 sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
1789 }
1790 else if (state == m_null)
1791 {
1792 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1793 sm_ctxt->warn (node, stmt, arg,
1794 make_unique<null_deref> (*this, diag_arg));
1795 sm_ctxt->set_next_state (stmt, arg, m_stop);
1796 }
1797 else if (freed_p (state))
1798 {
1799 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1800 const allocation_state *astate = as_a_allocation_state (state);
1801 sm_ctxt->warn (node, stmt, arg,
1802 make_unique<use_after_free>
1803 (*this, diag_arg, astate->m_deallocator));
1804 sm_ctxt->set_next_state (stmt, arg, m_stop);
1805 }
1806 }
1807 }
1808 return false;
1809 }
1810
1811 /* Handle a call to an allocator.
1812 RETURNS_NONNULL is true if CALL is to a fndecl known to have
1813 __attribute__((returns_nonnull)). */
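/* For example (hypothetical declaration):

     __attribute__ ((malloc (release), returns_nonnull))
     void *acquire (void);

   the result of a call to acquire starts in the set's "nonnull" state
   rather than "unchecked", so no possible-NULL diagnostics are issued
   for it ("release" being a placeholder deallocator).  */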
1814
1815 void
1816 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
1817 const gcall *call,
1818 const deallocator_set *deallocators,
1819 bool returns_nonnull) const
1820 {
1821 tree lhs = gimple_call_lhs (call);
1822 if (lhs)
1823 {
1824 if (sm_ctxt->get_state (call, lhs) == m_start)
1825 sm_ctxt->set_next_state (call, lhs,
1826 (returns_nonnull
1827 ? deallocators->m_nonnull
1828 : deallocators->m_unchecked));
1829 }
1830 else
1831 {
1832 /* TODO: report leak. */
1833 }
1834 }
1835
1836 /* Handle deallocations of non-heap pointers.
1837 non-heap -> stop, with warning. */
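/* An illustrative C testcase:

     #include <stdlib.h>
     void test (void)
     {
       char buf[16];
       free (buf);   // expected: -Wanalyzer-free-of-non-heap
     }
*/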
1838
1839 void
1840 malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
1841 const supernode *node,
1842 const gcall *call,
1843 tree arg,
1844 const deallocator *d) const
1845 {
1846 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1847 const region *freed_reg = NULL;
1848 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
1849 {
1850 const region_model *old_model = old_state->m_region_model;
1851 const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
1852 freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
1853 }
1854 sm_ctxt->warn (node, call, arg,
1855 make_unique<free_of_non_heap>
1856 (*this, diag_arg, freed_reg, d->m_name));
1857 sm_ctxt->set_next_state (call, arg, m_stop);
1858 }
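
/* An illustrative sketch of the non-heap case ("test" is hypothetical
   user code):

     #include <stdlib.h>

     void test (void)
     {
       int buf[16];
       free (buf);   // free_of_non_heap: BUF is on the stack
     }

   The deallocator's name (D->m_name) is passed to the diagnostic, so
   this helper can also describe e.g. a user-defined deallocator being
   applied to a non-heap pointer.  */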
1859
1860 void
1861 malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
1862 const supernode *node,
1863 const gcall *call,
1864 const deallocator *d,
1865 unsigned argno) const
1866 {
1867 if (argno >= gimple_call_num_args (call))
1868 return;
1869 tree arg = gimple_call_arg (call, argno);
1870
1871 state_t state = sm_ctxt->get_state (call, arg);
1872
1873 /* start/unchecked/nonnull -> freed. */
1874 if (state == m_start)
1875 sm_ctxt->set_next_state (call, arg, d->m_freed);
1876 else if (unchecked_p (state) || nonnull_p (state))
1877 {
1878 const allocation_state *astate = as_a_allocation_state (state);
1879 gcc_assert (astate->m_deallocators);
1880 if (!astate->m_deallocators->contains_p (d))
1881 {
1882 /* Wrong allocator. */
1883 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1884 sm_ctxt->warn (node, call, arg,
1885 make_unique<mismatching_deallocation>
1886 (*this, diag_arg,
1887 astate->m_deallocators,
1888 d));
1889 }
1890 sm_ctxt->set_next_state (call, arg, d->m_freed);
1891 }
1892
1893 /* Keep state "null" as-is, rather than transitioning to "freed";
1894 we don't want to complain about double-free of NULL. */
1895 else if (state == d->m_freed)
1896 {
1897 /* freed -> stop, with warning. */
1898 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1899 sm_ctxt->warn (node, call, arg,
1900 make_unique<double_free> (*this, diag_arg, d->m_name));
1901 sm_ctxt->set_next_state (call, arg, m_stop);
1902 }
1903 else if (state == m_non_heap)
1904 {
1905 /* non-heap -> stop, with warning. */
1906 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
1907 }
1908 }
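
/* An illustrative sketch of the transitions above ("test" is
   hypothetical user code):

     #include <stdlib.h>

     void test (void *p)
     {
       free (p);     // start -> freed
       free (p);     // freed -> stop: double_free reported
       free (NULL);  // no report: NULL stays in the "null" state
     }

   Passing a value to a deallocator outside its expected
   deallocator_set (for example, memory from a user-defined allocator
   handed to "free") is reported via mismatching_deallocation, but the
   value still transitions to that deallocator's "freed" state.  */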
1909
1910 /* Handle a call to "realloc".
1911 Check for free of non-heap or mismatching allocators,
1912 transitioning to the "stop" state for such cases.
1913
1914 Otherwise, region_model::impl_call_realloc will later
1915 get called (which will handle other sm-state transitions
1916 when the state is bifurcated). */
1917
1918 void
1919 malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
1920 const supernode *node,
1921 const gcall *call) const
1922 {
1923 const unsigned argno = 0;
1924 const deallocator *d = &m_realloc;
1925
1926 tree arg = gimple_call_arg (call, argno);
1927
1928 state_t state = sm_ctxt->get_state (call, arg);
1929
1930 if (unchecked_p (state) || nonnull_p (state))
1931 {
1932 const allocation_state *astate = as_a_allocation_state (state);
1933 gcc_assert (astate->m_deallocators);
1934 if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
1935 {
1936 /* Wrong allocator. */
1937 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1938 sm_ctxt->warn (node, call, arg,
1939 make_unique<mismatching_deallocation>
1940 (*this, diag_arg,
1941 astate->m_deallocators, d));
1942 sm_ctxt->set_next_state (call, arg, m_stop);
1943 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1944 path_ctxt->terminate_path ();
1945 }
1946 }
1947 else if (state == m_free.m_deallocator.m_freed)
1948 {
1949 /* freed -> stop, with warning. */
1950 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1951 sm_ctxt->warn (node, call, arg,
1952 make_unique<double_free> (*this, diag_arg, "free"));
1953 sm_ctxt->set_next_state (call, arg, m_stop);
1954 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1955 path_ctxt->terminate_path ();
1956 }
1957 else if (state == m_non_heap)
1958 {
1959 /* non-heap -> stop, with warning. */
1960 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
1961 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1962 path_ctxt->terminate_path ();
1963 }
1964 }
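
/* An illustrative sketch of the error cases handled above ("test" is
   hypothetical user code):

     #include <stdlib.h>

     void test (void)
     {
       char buf[16];
       realloc (buf, 32);   // free_of_non_heap: BUF is not heap-allocated

       char *p = malloc (16);
       free (p);
       realloc (p, 32);     // double_free: P was already freed
     }

   In these cases the path is terminated, since there is no meaningful
   successor state; the well-formed outcomes (grow in place, move to a
   new buffer, or failure) are modelled later by
   region_model::impl_call_realloc.  */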
1965
1966 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
1967
1968 void
1969 malloc_state_machine::on_phi (sm_context *sm_ctxt,
1970 const supernode *node ATTRIBUTE_UNUSED,
1971 const gphi *phi,
1972 tree rhs) const
1973 {
1974 if (zerop (rhs))
1975 {
1976 tree lhs = gimple_phi_result (phi);
1977 on_zero_assignment (sm_ctxt, phi, lhs);
1978 }
1979 }
1980
1981 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
1982 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
1983
1984 void
1985 malloc_state_machine::on_condition (sm_context *sm_ctxt,
1986 const supernode *node ATTRIBUTE_UNUSED,
1987 const gimple *stmt,
1988 const svalue *lhs,
1989 enum tree_code op,
1990 const svalue *rhs) const
1991 {
1992 if (!rhs->all_zeroes_p ())
1993 return;
1994
1995 if (!any_pointer_p (lhs))
1996 return;
1997 if (!any_pointer_p (rhs))
1998 return;
1999
2000 if (op == NE_EXPR)
2001 {
2002 log ("got 'ARG != 0' match");
2003 state_t s = sm_ctxt->get_state (stmt, lhs);
2004 if (unchecked_p (s))
2005 {
2006 const allocation_state *astate = as_a_allocation_state (s);
2007 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
2008 }
2009 }
2010 else if (op == EQ_EXPR)
2011 {
2012 log ("got 'ARG == 0' match");
2013 state_t s = sm_ctxt->get_state (stmt, lhs);
2014 if (unchecked_p (s))
2015 sm_ctxt->set_next_state (stmt, lhs, m_null);
2016 }
2017 }
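
/* An illustrative sketch of these transitions ("test" is hypothetical
   user code): comparing an allocation against NULL is how an
   "unchecked" value becomes trusted:

     #include <stdlib.h>

     void test (void)
     {
       char *p = malloc (16);   // unchecked
       if (p == NULL)           // EQ_EXPR edge: p -> null
         return;
       p[0] = 'x';              // NE_EXPR edge: p -> nonnull, no warning
       free (p);
     }
   */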
2018
2019 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
2020 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
2021 (to avoid false leak reports). */
2022
2023 bool
2024 malloc_state_machine::can_purge_p (state_t s) const
2025 {
2026 enum resource_state rs = get_rs (s);
2027 return rs != RS_UNCHECKED && rs != RS_NONNULL;
2028 }
2029
2030 /* Implementation of state_machine::on_leak vfunc for malloc_state_machine
2031 (for complaining about leaks of pointers in state 'unchecked' and
2032 'nonnull'). */
2033
2034 std::unique_ptr<pending_diagnostic>
2035 malloc_state_machine::on_leak (tree var) const
2036 {
2037 return make_unique<malloc_leak> (*this, var);
2038 }
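
/* An illustrative sketch of the leak case ("test" is hypothetical user
   code): a value still in the "unchecked" or "nonnull" state when it
   becomes unreachable is reported via malloc_leak:

     #include <stdlib.h>

     void test (void)
     {
       void *p = malloc (16);
     }                          // malloc_leak: P is never freed
   */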
2039
2040 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
2041 for malloc_state_machine. */
2042
2043 bool
2044 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
2045 bool is_mutable) const
2046 {
2047 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
2048 unknown fn. */
2049 if (s == m_non_heap)
2050 return false;
2051
2052 /* Otherwise, pointers passed as non-const can be freed. */
2053 return is_mutable;
2054 }
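
/* An illustrative sketch of the reset logic above ("my_unknown_fn" and
   "test" are hypothetical user code; my_unknown_fn has no body visible
   to the analyzer):

     #include <stdlib.h>
     extern void my_unknown_fn (void *p);

     void test (void)
     {
       void *p = malloc (16);
       my_unknown_fn (p);   // P's sm-state is reset: the callee could
                            // have freed or stored it
     }                      // hence no leak is reported here

   A pointer to a local, by contrast, keeps its "non-heap" state across
   such a call, so a later free of it is still reported.  */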
2055
2056 /* Return true if calls to FNDECL are known to not affect this sm-state. */
2057
2058 bool
2059 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2060 {
2061 /* A set of functions that are known to not affect allocation
2062 status, even if we haven't fully modelled the rest of their
2063 behavior yet. */
2064 static const char * const funcnames[] = {
2065 /* This array must be kept sorted. */
2066 "strsep",
2067 };
2068 const size_t count = ARRAY_SIZE (funcnames);
2069 function_set fs (funcnames, count);
2070
2071 if (fs.contains_decl_p (fndecl))
2072 return true;
2073
2074 return false;
2075 }
2076
2077 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2078 assign zero to LHS. */
2079
2080 void
2081 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2082 const gimple *stmt,
2083 tree lhs) const
2084 {
2085 state_t s = sm_ctxt->get_state (stmt, lhs);
2086 enum resource_state rs = get_rs (s);
2087 if (rs == RS_START
2088 || rs == RS_UNCHECKED
2089 || rs == RS_NONNULL
2090 || rs == RS_FREED)
2091 sm_ctxt->set_next_state (stmt, lhs, m_null);
2092 }
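
/* An illustrative sketch of the zero-assignment handling ("test" is
   hypothetical user code): nulling a pointer after freeing it moves the
   value to the "null" state, so a second free is correctly not
   reported:

     #include <stdlib.h>

     void test (void *p)
     {
       free (p);     // start -> freed
       p = NULL;     // freed -> null
       free (p);     // no double_free: freeing NULL is harmless
     }
   */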
2093
2094 /* Special-case hook for handling realloc, for the "success with move to
2095 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2096 non-null.
2097
2098 This is similar to on_deallocator_call and on_allocator_call,
2099 but the checks happen in on_realloc_call, and by splitting the states. */
2100
2101 void
2102 malloc_state_machine::
2103 on_realloc_with_move (region_model *model,
2104 sm_state_map *smap,
2105 const svalue *old_ptr_sval,
2106 const svalue *new_ptr_sval,
2107 const extrinsic_state &ext_state) const
2108 {
2109 smap->set_state (model, old_ptr_sval,
2110 m_free.m_deallocator.m_freed,
2111 NULL, ext_state);
2112
2113 smap->set_state (model, new_ptr_sval,
2114 m_free.m_nonnull,
2115 NULL, ext_state);
2116 }
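
/* An illustrative sketch of the "success with move" outcome handled
   above ("test" is hypothetical user code):

     #include <stdlib.h>

     void test (void)
     {
       char *p = malloc (16);
       char *q = realloc (p, 4096);  // suppose realloc moved the buffer
       p[0] = 'x';                   // use_after_free of P on that path
       free (q);
     }

   On the bifurcated path where the buffer moved, OLD_PTR_SVAL (P) is
   marked as freed and NEW_PTR_SVAL (Q) as non-null, which is what the
   two set_state calls above record.  */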
2117
2118 } // anonymous namespace
2119
2120 /* Internal interface to this file. */
2121
2122 state_machine *
2123 make_malloc_state_machine (logger *logger)
2124 {
2125 return new malloc_state_machine (logger);
2126 }
2127
2128 /* Special-case hook for handling realloc, for use by
2129 region_model::impl_call_realloc::success_with_move::update_model. */
2130
2131 void
2132 region_model::on_realloc_with_move (const call_details &cd,
2133 const svalue *old_ptr_sval,
2134 const svalue *new_ptr_sval)
2135 {
2136 region_model_context *ctxt = cd.get_ctxt ();
2137 if (!ctxt)
2138 return;
2139 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2140 if (!ext_state)
2141 return;
2142
2143 sm_state_map *smap;
2144 const state_machine *sm;
2145 unsigned sm_idx;
2146 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2147 return;
2148
2149 gcc_assert (smap);
2150 gcc_assert (sm);
2151
2152 const malloc_state_machine &malloc_sm
2153 = (const malloc_state_machine &)*sm;
2154
2155 malloc_sm.on_realloc_with_move (this,
2156 smap,
2157 old_ptr_sval,
2158 new_ptr_sval,
2159 *ext_state);
2160 }
2161
2162 } // namespace ana
2163
2164 #endif /* #if ENABLE_ANALYZER */