gcc/analyzer/sm-malloc.cc
1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "options.h"
29 #include "bitmap.h"
30 #include "diagnostic-path.h"
31 #include "diagnostic-metadata.h"
32 #include "function.h"
33 #include "json.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "tristate.h"
40 #include "selftest.h"
41 #include "analyzer/call-string.h"
42 #include "analyzer/program-point.h"
43 #include "analyzer/store.h"
44 #include "analyzer/region-model.h"
45 #include "stringpool.h"
46 #include "attribs.h"
47 #include "analyzer/function-set.h"
48 #include "analyzer/program-state.h"
49
50 #if ENABLE_ANALYZER
51
52 namespace ana {
53
54 namespace {
55
56 /* This state machine and its various support classes track allocations
57 and deallocations.
58
59 It has a few standard allocation/deallocation pairs (e.g. new/delete),
60 and also supports user-defined ones via
61 __attribute__ ((malloc(DEALLOCATOR))).
62
63 There can be more than one valid deallocator for a given allocator,
64 for example:
65 __attribute__ ((malloc (fclose)))
66 __attribute__ ((malloc (freopen, 3)))
67 FILE* fopen (const char*, const char*);
68 A deallocator_set represents a particular set of valid deallocators.
69
70 We track the expected deallocator_set for a value, but not the allocation
71 function - there could be more than one allocator per deallocator_set.
72 For example, there could be dozens of allocators for "free" beyond just
73 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
74 of states by tracking individual allocators in the exploded graph;
75 we merely want to track "this value expects to have 'free' called on it".
76 Perhaps we can reconstruct which allocator was used later, when emitting
77 the path, if it's necessary for precision of wording of diagnostics. */
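      /* As an illustrative sketch (not from this file; the names are
         hypothetical), a user-defined allocator/deallocator pair might be
         declared as:

           void my_free_obj (struct obj *);
           __attribute__ ((malloc (my_free_obj)))
           struct obj *my_alloc_obj (void);

         so that values returned by my_alloc_obj acquire a deallocator_set
         containing just my_free_obj.  */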
78
79 class deallocator;
80 class deallocator_set;
81 class malloc_state_machine;
82
83 /* An enum for discriminating between different kinds of allocation_state. */
84
85 enum resource_state
86 {
87 /* States that are independent of allocator/deallocator. */
88
89 /* The start state. */
90 RS_START,
91
92 /* State for a pointer that's known to be NULL. */
93 RS_NULL,
94
95 /* State for a pointer that's known to not be on the heap (e.g. to a local
96 or global). */
97 RS_NON_HEAP,
98
99 /* Stop state, for pointers we don't want to track any more. */
100 RS_STOP,
101
102 /* States that relate to a specific deallocator_set. */
103
104 /* State for a pointer returned from an allocator that hasn't
105 been checked for NULL.
106 It could be a pointer to heap-allocated memory, or could be NULL. */
107 RS_UNCHECKED,
108
109 /* State for a pointer returned from an allocator,
110 known to be non-NULL. */
111 RS_NONNULL,
112
113 /* State for a pointer passed to a deallocator. */
114 RS_FREED
115 };
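      /* An illustrative lifetime under these states (a sketch, not from
         this file):

           void *p = malloc (16);   p: RS_UNCHECKED
           if (p)                   on the "true" path: p: RS_NONNULL
             free (p);              p: RS_FREED

         with RS_START applying to p before the allocation.  */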
116
  117 /* Custom state subclass, which can optionally refer to a
118 deallocator_set. */
119
120 struct allocation_state : public state_machine::state
121 {
122 allocation_state (const char *name, unsigned id,
123 enum resource_state rs,
124 const deallocator_set *deallocators,
125 const deallocator *deallocator)
126 : state (name, id), m_rs (rs),
127 m_deallocators (deallocators),
128 m_deallocator (deallocator)
129 {}
130
131 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
132
133 const allocation_state *get_nonnull () const;
134
135 enum resource_state m_rs;
136 const deallocator_set *m_deallocators;
137 const deallocator *m_deallocator;
138 };
139
140 /* An enum for choosing which wording to use in various diagnostics
141 when describing deallocations. */
142
143 enum wording
144 {
145 WORDING_FREED,
146 WORDING_DELETED,
147 WORDING_DEALLOCATED,
148 WORDING_REALLOCATED
149 };
150
151 /* Base class representing a deallocation function,
152 either a built-in one we know about, or one exposed via
153 __attribute__((malloc(DEALLOCATOR))). */
154
155 struct deallocator
156 {
157 hashval_t hash () const;
158 void dump_to_pp (pretty_printer *pp) const;
159 static int cmp (const deallocator *a, const deallocator *b);
160 static int cmp_ptr_ptr (const void *, const void *);
161
162 /* Name to use in diagnostics. */
163 const char *m_name;
164
165 /* Which wording to use in diagnostics. */
166 enum wording m_wording;
167
168 /* State for a value passed to one of the deallocators. */
169 state_machine::state_t m_freed;
170
171 protected:
172 deallocator (malloc_state_machine *sm,
173 const char *name,
174 enum wording wording);
175 };
176
177 /* Subclass representing a predefined deallocator.
178 e.g. "delete []", without needing a specific FUNCTION_DECL
179 ahead of time. */
180
181 struct standard_deallocator : public deallocator
182 {
183 standard_deallocator (malloc_state_machine *sm,
184 const char *name,
185 enum wording wording);
186 };
187
188 /* Subclass representing a user-defined deallocator
189 via __attribute__((malloc(DEALLOCATOR))) given
190 a specific FUNCTION_DECL. */
191
192 struct custom_deallocator : public deallocator
193 {
194 custom_deallocator (malloc_state_machine *sm,
195 tree deallocator_fndecl,
196 enum wording wording)
197 : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
198 wording)
199 {
200 }
201 };
202
203 /* Base class representing a set of possible deallocators.
204 Often this will be just a single deallocator, but some
205 allocators have multiple valid deallocators (e.g. the result of
206 "fopen" can be closed by either "fclose" or "freopen"). */
207
208 struct deallocator_set
209 {
210 deallocator_set (malloc_state_machine *sm,
211 enum wording wording);
212 virtual ~deallocator_set () {}
213
214 virtual bool contains_p (const deallocator *d) const = 0;
215 virtual const deallocator *maybe_get_single () const = 0;
216 virtual void dump_to_pp (pretty_printer *pp) const = 0;
217 void dump () const;
218
219 /* Which wording to use in diagnostics. */
220 enum wording m_wording;
221
222 /* Pointers to states.
223 These states are owned by the state_machine base class. */
224
225 /* State for an unchecked result from an allocator using this set. */
226 state_machine::state_t m_unchecked;
227
228 /* State for a known non-NULL result from such an allocator. */
229 state_machine::state_t m_nonnull;
230 };
231
232 /* Subclass of deallocator_set representing a set of deallocators
233 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
234
235 struct custom_deallocator_set : public deallocator_set
236 {
237 typedef const auto_vec <const deallocator *> *key_t;
238
239 custom_deallocator_set (malloc_state_machine *sm,
240 const auto_vec <const deallocator *> *vec,
241 //const char *name,
242 //const char *dealloc_funcname,
243 //unsigned arg_idx,
244 enum wording wording);
245
246 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
247 const deallocator *maybe_get_single () const FINAL OVERRIDE;
248 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
249
250 auto_vec <const deallocator *> m_deallocator_vec;
251 };
252
253 /* Subclass of deallocator_set representing a set of deallocators
254 with a single standard_deallocator, e.g. "delete []". */
255
256 struct standard_deallocator_set : public deallocator_set
257 {
258 standard_deallocator_set (malloc_state_machine *sm,
259 const char *name,
260 enum wording wording);
261
262 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
263 const deallocator *maybe_get_single () const FINAL OVERRIDE;
264 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
265
266 standard_deallocator m_deallocator;
267 };
268
269 /* Traits class for ensuring uniqueness of deallocator_sets within
270 malloc_state_machine. */
271
272 struct deallocator_set_map_traits
273 {
274 typedef custom_deallocator_set::key_t key_type;
275 typedef custom_deallocator_set *value_type;
276 typedef custom_deallocator_set *compare_type;
277
278 static inline hashval_t hash (const key_type &k)
279 {
280 gcc_assert (k != NULL);
281 gcc_assert (k != reinterpret_cast<key_type> (1));
282
283 hashval_t result = 0;
284 unsigned i;
285 const deallocator *d;
286 FOR_EACH_VEC_ELT (*k, i, d)
287 result ^= d->hash ();
288 return result;
289 }
290 static inline bool equal_keys (const key_type &k1, const key_type &k2)
291 {
292 if (k1->length () != k2->length ())
293 return false;
294
295 for (unsigned i = 0; i < k1->length (); i++)
296 if ((*k1)[i] != (*k2)[i])
297 return false;
298
299 return true;
300 }
301 template <typename T>
302 static inline void remove (T &)
303 {
304 /* empty; the nodes are handled elsewhere. */
305 }
306 template <typename T>
307 static inline void mark_deleted (T &entry)
308 {
309 entry.m_key = reinterpret_cast<key_type> (1);
310 }
311 template <typename T>
312 static inline void mark_empty (T &entry)
313 {
314 entry.m_key = NULL;
315 }
316 template <typename T>
317 static inline bool is_deleted (const T &entry)
318 {
319 return entry.m_key == reinterpret_cast<key_type> (1);
320 }
321 template <typename T>
322 static inline bool is_empty (const T &entry)
323 {
324 return entry.m_key == NULL;
325 }
326 static const bool empty_zero_p = false;
327 };
328
329 /* A state machine for detecting misuses of the malloc/free API.
330
331 See sm-malloc.dot for an overview (keep this in-sync with that file). */
332
333 class malloc_state_machine : public state_machine
334 {
335 public:
336 typedef allocation_state custom_data_t;
337
338 malloc_state_machine (logger *logger);
339 ~malloc_state_machine ();
340
341 state_t
342 add_state (const char *name, enum resource_state rs,
343 const deallocator_set *deallocators,
344 const deallocator *deallocator);
345
346 bool inherited_state_p () const FINAL OVERRIDE { return false; }
347
348 state_machine::state_t
349 get_default_state (const svalue *sval) const FINAL OVERRIDE
350 {
351 if (tree cst = sval->maybe_get_constant ())
352 {
353 if (zerop (cst))
354 return m_null;
355 }
356 if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
357 {
358 const region *reg = ptr->get_pointee ();
359 const region *base_reg = reg->get_base_region ();
360 if (base_reg->get_kind () == RK_DECL
361 || base_reg->get_kind () == RK_STRING)
362 return m_non_heap;
363 }
364 return m_start;
365 }
366
367 bool on_stmt (sm_context *sm_ctxt,
368 const supernode *node,
369 const gimple *stmt) const FINAL OVERRIDE;
370
371 void on_phi (sm_context *sm_ctxt,
372 const supernode *node,
373 const gphi *phi,
374 tree rhs) const FINAL OVERRIDE;
375
376 void on_condition (sm_context *sm_ctxt,
377 const supernode *node,
378 const gimple *stmt,
379 const svalue *lhs,
380 enum tree_code op,
381 const svalue *rhs) const FINAL OVERRIDE;
382
383 bool can_purge_p (state_t s) const FINAL OVERRIDE;
384 pending_diagnostic *on_leak (tree var) const FINAL OVERRIDE;
385
386 bool reset_when_passed_to_unknown_fn_p (state_t s,
387 bool is_mutable) const FINAL OVERRIDE;
388
389 static bool unaffected_by_call_p (tree fndecl);
390
391 void on_realloc_with_move (region_model *model,
392 sm_state_map *smap,
393 const svalue *old_ptr_sval,
394 const svalue *new_ptr_sval,
395 const extrinsic_state &ext_state) const;
396
397 standard_deallocator_set m_free;
398 standard_deallocator_set m_scalar_delete;
399 standard_deallocator_set m_vector_delete;
400
401 standard_deallocator m_realloc;
402
403 /* States that are independent of api. */
404
405 /* State for a pointer that's known to be NULL. */
406 state_t m_null;
407
408 /* State for a pointer that's known to not be on the heap (e.g. to a local
409 or global). */
410 state_t m_non_heap; // TODO: or should this be a different state machine?
411 // or do we need child values etc?
412
413 /* Stop state, for pointers we don't want to track any more. */
414 state_t m_stop;
415
416 private:
417 const custom_deallocator_set *
418 get_or_create_custom_deallocator_set (tree allocator_fndecl);
419 custom_deallocator_set *
420 maybe_create_custom_deallocator_set (tree allocator_fndecl);
421 const deallocator *
422 get_or_create_deallocator (tree deallocator_fndecl);
423
424 void on_allocator_call (sm_context *sm_ctxt,
425 const gcall *call,
426 const deallocator_set *deallocators,
427 bool returns_nonnull = false) const;
428 void on_deallocator_call (sm_context *sm_ctxt,
429 const supernode *node,
430 const gcall *call,
431 const deallocator *d,
432 unsigned argno) const;
433 void on_realloc_call (sm_context *sm_ctxt,
434 const supernode *node,
435 const gcall *call) const;
436 void on_zero_assignment (sm_context *sm_ctxt,
437 const gimple *stmt,
438 tree lhs) const;
439
440 /* A map for consolidating deallocators so that they are
441 unique per deallocator FUNCTION_DECL. */
442 typedef hash_map<tree, deallocator *> deallocator_map_t;
443 deallocator_map_t m_deallocator_map;
444
445 /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *. */
446 typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
447 deallocator_set_cache_t m_custom_deallocator_set_cache;
448
449 /* A map for consolidating custom_deallocator_set instances. */
450 typedef hash_map<custom_deallocator_set::key_t,
451 custom_deallocator_set *,
452 deallocator_set_map_traits> custom_deallocator_set_map_t;
453 custom_deallocator_set_map_t m_custom_deallocator_set_map;
454
455 /* Record of dynamically-allocated objects, for cleanup. */
456 auto_vec <custom_deallocator_set *> m_dynamic_sets;
457 auto_vec <custom_deallocator *> m_dynamic_deallocators;
458 };
459
460 /* struct deallocator. */
461
462 deallocator::deallocator (malloc_state_machine *sm,
463 const char *name,
464 enum wording wording)
465 : m_name (name),
466 m_wording (wording),
467 m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
468 {
469 }
470
471 hashval_t
472 deallocator::hash () const
473 {
474 return (hashval_t)m_freed->get_id ();
475 }
476
477 void
478 deallocator::dump_to_pp (pretty_printer *pp) const
479 {
480 pp_printf (pp, "%qs", m_name);
481 }
482
483 int
484 deallocator::cmp (const deallocator *a, const deallocator *b)
485 {
486 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
487 }
488
489 int
490 deallocator::cmp_ptr_ptr (const void *a, const void *b)
491 {
492 return cmp (*(const deallocator * const *)a,
493 *(const deallocator * const *)b);
494 }
495
496
497 /* struct standard_deallocator : public deallocator. */
498
499 standard_deallocator::standard_deallocator (malloc_state_machine *sm,
500 const char *name,
501 enum wording wording)
502 : deallocator (sm, name, wording)
503 {
504 }
505
506 /* struct deallocator_set. */
507
508 deallocator_set::deallocator_set (malloc_state_machine *sm,
509 enum wording wording)
510 : m_wording (wording),
511 m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
512 m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
513 {
514 }
515
516 /* Dump a description of this deallocator_set to stderr. */
517
518 DEBUG_FUNCTION void
519 deallocator_set::dump () const
520 {
521 pretty_printer pp;
522 pp_show_color (&pp) = pp_show_color (global_dc->printer);
523 pp.buffer->stream = stderr;
524 dump_to_pp (&pp);
525 pp_newline (&pp);
526 pp_flush (&pp);
527 }
528
529 /* struct custom_deallocator_set : public deallocator_set. */
530
531 custom_deallocator_set::
532 custom_deallocator_set (malloc_state_machine *sm,
533 const auto_vec <const deallocator *> *vec,
534 enum wording wording)
535 : deallocator_set (sm, wording),
536 m_deallocator_vec (vec->length ())
537 {
538 unsigned i;
539 const deallocator *d;
540 FOR_EACH_VEC_ELT (*vec, i, d)
541 m_deallocator_vec.safe_push (d);
542 }
543
544 bool
545 custom_deallocator_set::contains_p (const deallocator *d) const
546 {
547 unsigned i;
548 const deallocator *cd;
549 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
550 if (cd == d)
551 return true;
552 return false;
553 }
554
555 const deallocator *
556 custom_deallocator_set::maybe_get_single () const
557 {
558 if (m_deallocator_vec.length () == 1)
559 return m_deallocator_vec[0];
560 return NULL;
561 }
562
563 void
564 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
565 {
566 pp_character (pp, '{');
567 unsigned i;
568 const deallocator *d;
569 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
570 {
571 if (i > 0)
572 pp_string (pp, ", ");
573 d->dump_to_pp (pp);
574 }
575 pp_character (pp, '}');
576 }
577
578 /* struct standard_deallocator_set : public deallocator_set. */
579
580 standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
581 const char *name,
582 enum wording wording)
583 : deallocator_set (sm, wording),
584 m_deallocator (sm, name, wording)
585 {
586 }
587
588 bool
589 standard_deallocator_set::contains_p (const deallocator *d) const
590 {
591 return d == &m_deallocator;
592 }
593
594 const deallocator *
595 standard_deallocator_set::maybe_get_single () const
596 {
597 return &m_deallocator;
598 }
599
600 void
601 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
602 {
603 pp_character (pp, '{');
604 pp_string (pp, m_deallocator.m_name);
605 pp_character (pp, '}');
606 }
607
608 /* Return STATE cast to the custom state subclass, or NULL for the start state.
609 Everything should be an allocation_state apart from the start state. */
610
611 static const allocation_state *
612 dyn_cast_allocation_state (state_machine::state_t state)
613 {
614 if (state->get_id () == 0)
615 return NULL;
616 return static_cast <const allocation_state *> (state);
617 }
618
619 /* Return STATE cast to the custom state subclass, for a state that is
  620    already known to not be the start state. */
621
622 static const allocation_state *
623 as_a_allocation_state (state_machine::state_t state)
624 {
625 gcc_assert (state->get_id () != 0);
626 return static_cast <const allocation_state *> (state);
627 }
628
629 /* Get the resource_state for STATE. */
630
631 static enum resource_state
632 get_rs (state_machine::state_t state)
633 {
634 if (const allocation_state *astate = dyn_cast_allocation_state (state))
635 return astate->m_rs;
636 else
637 return RS_START;
638 }
639
640 /* Return true if STATE is the start state. */
641
642 static bool
643 start_p (state_machine::state_t state)
644 {
645 return get_rs (state) == RS_START;
646 }
647
648 /* Return true if STATE is an unchecked result from an allocator. */
649
650 static bool
651 unchecked_p (state_machine::state_t state)
652 {
653 return get_rs (state) == RS_UNCHECKED;
654 }
655
656 /* Return true if STATE is a non-null result from an allocator. */
657
658 static bool
659 nonnull_p (state_machine::state_t state)
660 {
661 return get_rs (state) == RS_NONNULL;
662 }
663
664 /* Return true if STATE is a value that has been passed to a deallocator. */
665
666 static bool
667 freed_p (state_machine::state_t state)
668 {
669 return get_rs (state) == RS_FREED;
670 }
671
672 /* Class for diagnostics relating to malloc_state_machine. */
673
674 class malloc_diagnostic : public pending_diagnostic
675 {
676 public:
677 malloc_diagnostic (const malloc_state_machine &sm, tree arg)
678 : m_sm (sm), m_arg (arg)
679 {}
680
681 bool subclass_equal_p (const pending_diagnostic &base_other) const OVERRIDE
682 {
683 return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
684 }
685
686 label_text describe_state_change (const evdesc::state_change &change)
687 OVERRIDE
688 {
689 if (change.m_old_state == m_sm.get_start_state ()
690 && unchecked_p (change.m_new_state))
691 // TODO: verify that it's the allocation stmt, not a copy
692 return label_text::borrow ("allocated here");
693 if (unchecked_p (change.m_old_state)
694 && nonnull_p (change.m_new_state))
695 {
696 if (change.m_expr)
697 return change.formatted_print ("assuming %qE is non-NULL",
698 change.m_expr);
699 else
700 return change.formatted_print ("assuming %qs is non-NULL",
701 "<unknown>");
702 }
703 if (change.m_new_state == m_sm.m_null)
704 {
705 if (unchecked_p (change.m_old_state))
706 {
707 if (change.m_expr)
708 return change.formatted_print ("assuming %qE is NULL",
709 change.m_expr);
710 else
711 return change.formatted_print ("assuming %qs is NULL",
712 "<unknown>");
713 }
714 else
715 {
716 if (change.m_expr)
717 return change.formatted_print ("%qE is NULL",
718 change.m_expr);
719 else
720 return change.formatted_print ("%qs is NULL",
721 "<unknown>");
722 }
723 }
724
725 return label_text ();
726 }
727
728 protected:
729 const malloc_state_machine &m_sm;
730 tree m_arg;
731 };
732
733 /* Concrete subclass for reporting mismatching allocator/deallocator
734 diagnostics. */
735
736 class mismatching_deallocation : public malloc_diagnostic
737 {
738 public:
739 mismatching_deallocation (const malloc_state_machine &sm, tree arg,
740 const deallocator_set *expected_deallocators,
741 const deallocator *actual_dealloc)
742 : malloc_diagnostic (sm, arg),
743 m_expected_deallocators (expected_deallocators),
744 m_actual_dealloc (actual_dealloc)
745 {}
746
747 const char *get_kind () const FINAL OVERRIDE
748 {
749 return "mismatching_deallocation";
750 }
751
752 bool emit (rich_location *rich_loc) FINAL OVERRIDE
753 {
754 auto_diagnostic_group d;
755 diagnostic_metadata m;
756 m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines. */
757 if (const deallocator *expected_dealloc
758 = m_expected_deallocators->maybe_get_single ())
759 return warning_meta (rich_loc, m, OPT_Wanalyzer_mismatching_deallocation,
760 "%qE should have been deallocated with %qs"
761 " but was deallocated with %qs",
762 m_arg, expected_dealloc->m_name,
763 m_actual_dealloc->m_name);
764 else
765 return warning_meta (rich_loc, m, OPT_Wanalyzer_mismatching_deallocation,
766 "%qs called on %qE returned from a mismatched"
767 " allocation function",
768 m_actual_dealloc->m_name, m_arg);
769 }
770
771 label_text describe_state_change (const evdesc::state_change &change)
772 FINAL OVERRIDE
773 {
774 if (unchecked_p (change.m_new_state))
775 {
776 m_alloc_event = change.m_event_id;
777 if (const deallocator *expected_dealloc
778 = m_expected_deallocators->maybe_get_single ())
779 return change.formatted_print ("allocated here"
780 " (expects deallocation with %qs)",
781 expected_dealloc->m_name);
782 else
783 return change.formatted_print ("allocated here");
784 }
785 return malloc_diagnostic::describe_state_change (change);
786 }
787
788 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
789 {
790 if (m_alloc_event.known_p ())
791 {
792 if (const deallocator *expected_dealloc
793 = m_expected_deallocators->maybe_get_single ())
794 return ev.formatted_print
795 ("deallocated with %qs here;"
796 " allocation at %@ expects deallocation with %qs",
797 m_actual_dealloc->m_name, &m_alloc_event,
798 expected_dealloc->m_name);
799 else
800 return ev.formatted_print
801 ("deallocated with %qs here;"
802 " allocated at %@",
803 m_actual_dealloc->m_name, &m_alloc_event);
804 }
805 return ev.formatted_print ("deallocated with %qs here",
806 m_actual_dealloc->m_name);
807 }
808
809 private:
810 diagnostic_event_id_t m_alloc_event;
811 const deallocator_set *m_expected_deallocators;
812 const deallocator *m_actual_dealloc;
813 };
814
815 /* Concrete subclass for reporting double-free diagnostics. */
816
817 class double_free : public malloc_diagnostic
818 {
819 public:
820 double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
821 : malloc_diagnostic (sm, arg), m_funcname (funcname)
822 {}
823
824 const char *get_kind () const FINAL OVERRIDE { return "double_free"; }
825
826 bool emit (rich_location *rich_loc) FINAL OVERRIDE
827 {
828 auto_diagnostic_group d;
829 diagnostic_metadata m;
830 m.add_cwe (415); /* CWE-415: Double Free. */
831 return warning_meta (rich_loc, m, OPT_Wanalyzer_double_free,
832 "double-%qs of %qE", m_funcname, m_arg);
833 }
834
835 label_text describe_state_change (const evdesc::state_change &change)
836 FINAL OVERRIDE
837 {
838 if (freed_p (change.m_new_state))
839 {
840 m_first_free_event = change.m_event_id;
841 return change.formatted_print ("first %qs here", m_funcname);
842 }
843 return malloc_diagnostic::describe_state_change (change);
844 }
845
846 label_text describe_call_with_state (const evdesc::call_with_state &info)
847 FINAL OVERRIDE
848 {
849 if (freed_p (info.m_state))
850 return info.formatted_print
851 ("passing freed pointer %qE in call to %qE from %qE",
852 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
853 return label_text ();
854 }
855
856 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
857 {
858 if (m_first_free_event.known_p ())
859 return ev.formatted_print ("second %qs here; first %qs was at %@",
860 m_funcname, m_funcname,
861 &m_first_free_event);
862 return ev.formatted_print ("second %qs here", m_funcname);
863 }
864
865 private:
866 diagnostic_event_id_t m_first_free_event;
867 const char *m_funcname;
868 };
869
870 /* Abstract subclass for describing possible bad uses of NULL.
871 Responsible for describing the call that could return NULL. */
872
873 class possible_null : public malloc_diagnostic
874 {
875 public:
876 possible_null (const malloc_state_machine &sm, tree arg)
877 : malloc_diagnostic (sm, arg)
878 {}
879
880 label_text describe_state_change (const evdesc::state_change &change)
881 FINAL OVERRIDE
882 {
883 if (change.m_old_state == m_sm.get_start_state ()
884 && unchecked_p (change.m_new_state))
885 {
886 m_origin_of_unchecked_event = change.m_event_id;
887 return label_text::borrow ("this call could return NULL");
888 }
889 return malloc_diagnostic::describe_state_change (change);
890 }
891
892 label_text describe_return_of_state (const evdesc::return_of_state &info)
893 FINAL OVERRIDE
894 {
895 if (unchecked_p (info.m_state))
896 return info.formatted_print ("possible return of NULL to %qE from %qE",
897 info.m_caller_fndecl, info.m_callee_fndecl);
898 return label_text ();
899 }
900
901 protected:
902 diagnostic_event_id_t m_origin_of_unchecked_event;
903 };
904
905 /* Concrete subclass for describing dereference of a possible NULL
906 value. */
907
908 class possible_null_deref : public possible_null
909 {
910 public:
911 possible_null_deref (const malloc_state_machine &sm, tree arg)
912 : possible_null (sm, arg)
913 {}
914
915 const char *get_kind () const FINAL OVERRIDE { return "possible_null_deref"; }
916
917 bool emit (rich_location *rich_loc) FINAL OVERRIDE
918 {
919 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
920 diagnostic_metadata m;
921 m.add_cwe (690);
922 return warning_meta (rich_loc, m,
923 OPT_Wanalyzer_possible_null_dereference,
924 "dereference of possibly-NULL %qE", m_arg);
925 }
926
927 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
928 {
929 if (m_origin_of_unchecked_event.known_p ())
930 return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
931 ev.m_expr,
932 &m_origin_of_unchecked_event);
933 else
934 return ev.formatted_print ("%qE could be NULL", ev.m_expr);
935 }
936
937 };
938
939 /* Return true if FNDECL is a C++ method. */
940
941 static bool
942 method_p (tree fndecl)
943 {
944 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
945 }
946
947 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
948 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
949 as called from cp_printer). */
950
951 static label_text
952 describe_argument_index (tree fndecl, int arg_idx)
953 {
954 if (method_p (fndecl))
955 if (arg_idx == 0)
956 return label_text::borrow ("'this'");
957 pretty_printer pp;
958 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
959 return label_text::take (xstrdup (pp_formatted_text (&pp)));
960 }
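      /* For example (hypothetical signatures, not from this file):
         for "void f (int *a, int *b)", ARG_IDX 1 is described as "2";
         for a method "void C::m (int *a)", ARG_IDX 0 is described as
         "'this'" and ARG_IDX 1 as "1".  */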
961
962 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
963 Issue a note informing that the pertinent argument must be non-NULL. */
964
965 static void
966 inform_nonnull_attribute (tree fndecl, int arg_idx)
967 {
968 label_text arg_desc = describe_argument_index (fndecl, arg_idx);
969 inform (DECL_SOURCE_LOCATION (fndecl),
970 "argument %s of %qD must be non-null",
971 arg_desc.m_buffer, fndecl);
972 arg_desc.maybe_free ();
973 /* Ideally we would use the location of the parm and underline the
974 attribute also - but we don't have the location_t values at this point
975 in the middle-end.
976 For reference, the C and C++ FEs have get_fndecl_argument_location. */
977 }
978
979 /* Concrete subclass for describing passing a possibly-NULL value to a
980 function marked with __attribute__((nonnull)). */
981
982 class possible_null_arg : public possible_null
983 {
984 public:
985 possible_null_arg (const malloc_state_machine &sm, tree arg,
986 tree fndecl, int arg_idx)
987 : possible_null (sm, arg),
988 m_fndecl (fndecl), m_arg_idx (arg_idx)
989 {}
990
991 const char *get_kind () const FINAL OVERRIDE { return "possible_null_arg"; }
992
993 bool subclass_equal_p (const pending_diagnostic &base_other) const
994 {
995 const possible_null_arg &sub_other
996 = (const possible_null_arg &)base_other;
997 return (same_tree_p (m_arg, sub_other.m_arg)
998 && m_fndecl == sub_other.m_fndecl
999 && m_arg_idx == sub_other.m_arg_idx);
1000 }
1001
1002
1003 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1004 {
1005 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1006 auto_diagnostic_group d;
1007 diagnostic_metadata m;
1008 m.add_cwe (690);
1009 bool warned
1010 = warning_meta (rich_loc, m, OPT_Wanalyzer_possible_null_argument,
1011 "use of possibly-NULL %qE where non-null expected",
1012 m_arg);
1013 if (warned)
1014 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1015 return warned;
1016 }
1017
1018 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1019 {
1020 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1021 label_text result;
1022 if (m_origin_of_unchecked_event.known_p ())
1023 result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
1024 " where non-null expected",
1025 arg_desc.m_buffer, ev.m_expr,
1026 &m_origin_of_unchecked_event);
1027 else
1028 result = ev.formatted_print ("argument %s (%qE) could be NULL"
1029 " where non-null expected",
1030 arg_desc.m_buffer, ev.m_expr);
1031 arg_desc.maybe_free ();
1032 return result;
1033 }
1034
1035 private:
1036 tree m_fndecl;
1037 int m_arg_idx;
1038 };
1039
1040 /* Concrete subclass for describing a dereference of a NULL value. */
1041
1042 class null_deref : public malloc_diagnostic
1043 {
1044 public:
1045 null_deref (const malloc_state_machine &sm, tree arg)
1046 : malloc_diagnostic (sm, arg) {}
1047
1048 const char *get_kind () const FINAL OVERRIDE { return "null_deref"; }
1049
1050 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1051 {
1052 /* CWE-476: NULL Pointer Dereference. */
1053 diagnostic_metadata m;
1054 m.add_cwe (476);
1055 return warning_meta (rich_loc, m,
1056 OPT_Wanalyzer_null_dereference,
1057 "dereference of NULL %qE", m_arg);
1058 }
1059
1060 label_text describe_return_of_state (const evdesc::return_of_state &info)
1061 FINAL OVERRIDE
1062 {
1063 if (info.m_state == m_sm.m_null)
1064 return info.formatted_print ("return of NULL to %qE from %qE",
1065 info.m_caller_fndecl, info.m_callee_fndecl);
1066 return label_text ();
1067 }
1068
1069 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1070 {
1071 return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
1072 }
1073 };
1074
1075 /* Concrete subclass for describing passing a NULL value to a
1076 function marked with __attribute__((nonnull)). */
1077
1078 class null_arg : public malloc_diagnostic
1079 {
1080 public:
1081 null_arg (const malloc_state_machine &sm, tree arg,
1082 tree fndecl, int arg_idx)
1083 : malloc_diagnostic (sm, arg),
1084 m_fndecl (fndecl), m_arg_idx (arg_idx)
1085 {}
1086
1087 const char *get_kind () const FINAL OVERRIDE { return "null_arg"; }
1088
1089 bool subclass_equal_p (const pending_diagnostic &base_other) const
1090 {
1091 const null_arg &sub_other
1092 = (const null_arg &)base_other;
1093 return (same_tree_p (m_arg, sub_other.m_arg)
1094 && m_fndecl == sub_other.m_fndecl
1095 && m_arg_idx == sub_other.m_arg_idx);
1096 }
1097
1098 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1099 {
1100 /* CWE-476: NULL Pointer Dereference. */
1101 auto_diagnostic_group d;
1102 diagnostic_metadata m;
1103 m.add_cwe (476);
1104
1105 bool warned;
1106 if (zerop (m_arg))
1107 warned = warning_meta (rich_loc, m, OPT_Wanalyzer_null_argument,
1108 "use of NULL where non-null expected");
1109 else
1110 warned = warning_meta (rich_loc, m, OPT_Wanalyzer_null_argument,
1111 "use of NULL %qE where non-null expected",
1112 m_arg);
1113 if (warned)
1114 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1115 return warned;
1116 }
1117
1118 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1119 {
1120 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1121 label_text result;
1122 if (zerop (ev.m_expr))
1123 result = ev.formatted_print ("argument %s NULL where non-null expected",
1124 arg_desc.m_buffer);
1125 else
1126 result = ev.formatted_print ("argument %s (%qE) NULL"
1127 " where non-null expected",
1128 arg_desc.m_buffer, ev.m_expr);
1129 arg_desc.maybe_free ();
1130 return result;
1131 }
1132
1133 private:
1134 tree m_fndecl;
1135 int m_arg_idx;
1136 };
1137
1138 class use_after_free : public malloc_diagnostic
1139 {
1140 public:
1141 use_after_free (const malloc_state_machine &sm, tree arg,
1142 const deallocator *deallocator)
1143 : malloc_diagnostic (sm, arg),
1144 m_deallocator (deallocator)
1145 {
1146 gcc_assert (deallocator);
1147 }
1148
1149 const char *get_kind () const FINAL OVERRIDE { return "use_after_free"; }
1150
1151 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1152 {
1153 /* CWE-416: Use After Free. */
1154 diagnostic_metadata m;
1155 m.add_cwe (416);
1156 return warning_meta (rich_loc, m, OPT_Wanalyzer_use_after_free,
1157 "use after %<%s%> of %qE",
1158 m_deallocator->m_name, m_arg);
1159 }
1160
1161 label_text describe_state_change (const evdesc::state_change &change)
1162 FINAL OVERRIDE
1163 {
1164 if (freed_p (change.m_new_state))
1165 {
1166 m_free_event = change.m_event_id;
1167 switch (m_deallocator->m_wording)
1168 {
1169 default:
1170 case WORDING_REALLOCATED:
1171 gcc_unreachable ();
1172 case WORDING_FREED:
1173 return label_text::borrow ("freed here");
1174 case WORDING_DELETED:
1175 return label_text::borrow ("deleted here");
1176 case WORDING_DEALLOCATED:
1177 return label_text::borrow ("deallocated here");
1178 }
1179 }
1180 return malloc_diagnostic::describe_state_change (change);
1181 }
1182
1183 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1184 {
1185 const char *funcname = m_deallocator->m_name;
1186 if (m_free_event.known_p ())
1187 switch (m_deallocator->m_wording)
1188 {
1189 default:
1190 case WORDING_REALLOCATED:
1191 gcc_unreachable ();
1192 case WORDING_FREED:
1193 return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
1194 funcname, ev.m_expr, &m_free_event);
1195 case WORDING_DELETED:
1196 return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
1197 funcname, ev.m_expr, &m_free_event);
1198 case WORDING_DEALLOCATED:
1199 return ev.formatted_print ("use after %<%s%> of %qE;"
1200 " deallocated at %@",
1201 funcname, ev.m_expr, &m_free_event);
1202 }
1203 else
1204 return ev.formatted_print ("use after %<%s%> of %qE",
1205 funcname, ev.m_expr);
1206 }
1207
1208 /* Implementation of pending_diagnostic::supercedes_p for
1209 use_after_free.
1210
 1211      We want use-after-free to supercede use-of-uninitialized-value,
1212 so that if we have these at the same stmt, we don't emit
1213 a use-of-uninitialized, just the use-after-free.
1214 (this is because we fully purge information about freed
1215 buffers when we free them to avoid state explosions, so
1216 that if they are accessed after the free, it looks like
1217 they are uninitialized). */
1218
1219 bool supercedes_p (const pending_diagnostic &other) const FINAL OVERRIDE
1220 {
1221 if (other.use_of_uninit_p ())
1222 return true;
1223
1224 return false;
1225 }
1226
1227 private:
1228 diagnostic_event_id_t m_free_event;
1229 const deallocator *m_deallocator;
1230 };
1231
1232 class malloc_leak : public malloc_diagnostic
1233 {
1234 public:
1235 malloc_leak (const malloc_state_machine &sm, tree arg)
1236 : malloc_diagnostic (sm, arg) {}
1237
1238 const char *get_kind () const FINAL OVERRIDE { return "malloc_leak"; }
1239
1240 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1241 {
1242 diagnostic_metadata m;
1243 m.add_cwe (401);
1244 if (m_arg)
1245 return warning_meta (rich_loc, m, OPT_Wanalyzer_malloc_leak,
1246 "leak of %qE", m_arg);
1247 else
1248 return warning_meta (rich_loc, m, OPT_Wanalyzer_malloc_leak,
1249 "leak of %qs", "<unknown>");
1250 }
1251
1252 label_text describe_state_change (const evdesc::state_change &change)
1253 FINAL OVERRIDE
1254 {
1255 if (unchecked_p (change.m_new_state)
1256 || (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
1257 {
1258 m_alloc_event = change.m_event_id;
1259 return label_text::borrow ("allocated here");
1260 }
1261 return malloc_diagnostic::describe_state_change (change);
1262 }
1263
1264 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1265 {
1266 if (ev.m_expr)
1267 {
1268 if (m_alloc_event.known_p ())
1269 return ev.formatted_print ("%qE leaks here; was allocated at %@",
1270 ev.m_expr, &m_alloc_event);
1271 else
1272 return ev.formatted_print ("%qE leaks here", ev.m_expr);
1273 }
1274 else
1275 {
1276 if (m_alloc_event.known_p ())
1277 return ev.formatted_print ("%qs leaks here; was allocated at %@",
1278 "<unknown>", &m_alloc_event);
1279 else
1280 return ev.formatted_print ("%qs leaks here", "<unknown>");
1281 }
1282 }
1283
1284 private:
1285 diagnostic_event_id_t m_alloc_event;
1286 };
1287
1288 class free_of_non_heap : public malloc_diagnostic
1289 {
1290 public:
1291 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1292 const char *funcname)
1293 : malloc_diagnostic (sm, arg), m_funcname (funcname), m_kind (KIND_UNKNOWN)
1294 {
1295 }
1296
1297 const char *get_kind () const FINAL OVERRIDE { return "free_of_non_heap"; }
1298
1299 bool subclass_equal_p (const pending_diagnostic &base_other) const
1300 FINAL OVERRIDE
1301 {
1302 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1303 return (same_tree_p (m_arg, other.m_arg) && m_kind == other.m_kind);
1304 }
1305
1306 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1307 {
1308 auto_diagnostic_group d;
1309 diagnostic_metadata m;
1310 m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1311 switch (m_kind)
1312 {
1313 default:
1314 gcc_unreachable ();
1315 case KIND_UNKNOWN:
1316 return warning_meta (rich_loc, m, OPT_Wanalyzer_free_of_non_heap,
1317 "%<%s%> of %qE which points to memory"
1318 " not on the heap",
1319 m_funcname, m_arg);
1320 break;
1321 case KIND_ALLOCA:
1322 return warning_meta (rich_loc, m, OPT_Wanalyzer_free_of_non_heap,
1323 "%<%s%> of memory allocated on the stack by"
1324 " %qs (%qE) will corrupt the heap",
1325 m_funcname, "alloca", m_arg);
1326 break;
1327 }
1328 }
1329
1330 label_text describe_state_change (const evdesc::state_change &change)
1331 FINAL OVERRIDE
1332 {
1333 /* Attempt to reconstruct what kind of pointer it is.
1334 (It seems neater for this to be a part of the state, though). */
1335 if (change.m_expr && TREE_CODE (change.m_expr) == SSA_NAME)
1336 {
1337 gimple *def_stmt = SSA_NAME_DEF_STMT (change.m_expr);
1338 if (gcall *call = dyn_cast <gcall *> (def_stmt))
1339 {
1340 if (is_special_named_call_p (call, "alloca", 1)
1341 || is_special_named_call_p (call, "__builtin_alloca", 1))
1342 {
1343 m_kind = KIND_ALLOCA;
1344 return label_text::borrow
1345 ("memory is allocated on the stack here");
1346 }
1347 }
1348 }
1349 return label_text::borrow ("pointer is from here");
1350 }
1351
1352 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1353 {
1354 return ev.formatted_print ("call to %qs here", m_funcname);
1355 }
1356
1357 private:
1358 enum kind
1359 {
1360 KIND_UNKNOWN,
1361 KIND_ALLOCA
1362 };
1363 const char *m_funcname;
1364 enum kind m_kind;
1365 };
1366
1367 /* struct allocation_state : public state_machine::state. */
1368
1369 /* Implementation of state_machine::state::dump_to_pp vfunc
1370 for allocation_state: append the API that this allocation is
1371 associated with. */
1372
1373 void
1374 allocation_state::dump_to_pp (pretty_printer *pp) const
1375 {
1376 state_machine::state::dump_to_pp (pp);
1377 if (m_deallocators)
1378 {
1379 pp_string (pp, " (");
1380 m_deallocators->dump_to_pp (pp);
1381 pp_character (pp, ')');
1382 }
1383 }
1384
 1385 /* Given an allocation_state for a deallocator_set, get the "nonnull" state
1386 for the corresponding allocator(s). */
1387
1388 const allocation_state *
1389 allocation_state::get_nonnull () const
1390 {
1391 gcc_assert (m_deallocators);
1392 return as_a_allocation_state (m_deallocators->m_nonnull);
1393 }
1394
1395 /* malloc_state_machine's ctor. */
1396
1397 malloc_state_machine::malloc_state_machine (logger *logger)
1398 : state_machine ("malloc", logger),
1399 m_free (this, "free", WORDING_FREED),
1400 m_scalar_delete (this, "delete", WORDING_DELETED),
1401 m_vector_delete (this, "delete[]", WORDING_DELETED),
1402 m_realloc (this, "realloc", WORDING_REALLOCATED)
1403 {
1404 gcc_assert (m_start->get_id () == 0);
1405 m_null = add_state ("null", RS_FREED, NULL, NULL);
1406 m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
1407 m_stop = add_state ("stop", RS_STOP, NULL, NULL);
1408 }
1409
1410 malloc_state_machine::~malloc_state_machine ()
1411 {
1412 unsigned i;
1413 custom_deallocator_set *set;
1414 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1415 delete set;
1416 custom_deallocator *d;
1417 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1418 delete d;
1419 }
1420
1421 state_machine::state_t
1422 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1423 const deallocator_set *deallocators,
1424 const deallocator *deallocator)
1425 {
1426 return add_custom_state (new allocation_state (name, alloc_state_id (),
1427 rs, deallocators,
1428 deallocator));
1429 }
1430
1431 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1432 return a custom_deallocator_set for them, consolidating them
1433 to ensure uniqueness of the sets.
1434
1435 Return NULL if it has no such attributes. */
1436
1437 const custom_deallocator_set *
1438 malloc_state_machine::
1439 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1440 {
1441 /* Early rejection of decls without attributes. */
1442 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1443 if (!attrs)
1444 return NULL;
1445
1446 /* Otherwise, call maybe_create_custom_deallocator_set,
1447 memoizing the result. */
1448 if (custom_deallocator_set **slot
1449 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1450 return *slot;
1451 custom_deallocator_set *set
1452 = maybe_create_custom_deallocator_set (allocator_fndecl);
1453 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1454 return set;
1455 }
1456
1457 /* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1458 look for any "__attribute__((malloc(FOO)))" and return a
1459 custom_deallocator_set for them, consolidating them
1460 to ensure uniqueness of the sets.
1461
1462 Return NULL if it has no such attributes.
1463
1464 Subroutine of get_or_create_custom_deallocator_set which
1465 memoizes the result. */
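      /* For instance, mirroring the example in the header comment
         (a hypothetical declaration, not taken from this file):

           __attribute__ ((malloc (fclose)))
           __attribute__ ((malloc (freopen, 3)))
           FILE *fopen (const char *, const char *);

         would yield a custom_deallocator_set containing fclose and freopen,
         with the vector sorted so that repeated declarations consolidate to
         the same set.  */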
1466
1467 custom_deallocator_set *
1468 malloc_state_machine::
1469 maybe_create_custom_deallocator_set (tree allocator_fndecl)
1470 {
1471 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1472 gcc_assert (attrs);
1473
1474 /* Look for instances of __attribute__((malloc(FOO))). */
1475 auto_vec<const deallocator *> deallocator_vec;
1476 for (tree allocs = attrs;
1477 (allocs = lookup_attribute ("malloc", allocs));
1478 allocs = TREE_CHAIN (allocs))
1479 {
1480 tree args = TREE_VALUE (allocs);
1481 if (!args)
1482 continue;
1483 if (TREE_VALUE (args))
1484 {
1485 const deallocator *d
1486 = get_or_create_deallocator (TREE_VALUE (args));
1487 deallocator_vec.safe_push (d);
1488 }
1489 }
1490
1491 /* If there weren't any deallocators, bail. */
1492 if (deallocator_vec.length () == 0)
1493 return NULL;
1494
1495 /* Consolidate, so that we reuse existing deallocator_set
1496 instances. */
1497 deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
1498 custom_deallocator_set **slot
1499 = m_custom_deallocator_set_map.get (&deallocator_vec);
1500 if (slot)
1501 return *slot;
1502 custom_deallocator_set *set
1503 = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
1504 m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
1505 m_dynamic_sets.safe_push (set);
1506 return set;
1507 }
1508
1509 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1510
1511 const deallocator *
1512 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1513 {
1514 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1515 if (slot)
1516 return *slot;
1517
1518 /* Reuse "free". */
1519 deallocator *d;
1520 if (is_named_call_p (deallocator_fndecl, "free")
1521 || is_std_named_call_p (deallocator_fndecl, "free")
1522 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1523 d = &m_free.m_deallocator;
1524 else
1525 {
1526 custom_deallocator *cd
1527 = new custom_deallocator (this, deallocator_fndecl,
1528 WORDING_DEALLOCATED);
1529 m_dynamic_deallocators.safe_push (cd);
1530 d = cd;
1531 }
1532 m_deallocator_map.put (deallocator_fndecl, d);
1533 return d;
1534 }
1535
1536 /* Try to identify the function declaration either by name or as a known malloc
1537 builtin. */
1538
1539 static bool
1540 known_allocator_p (const_tree fndecl, const gcall *call)
1541 {
1542 /* Either it is a function we know by name and number of arguments... */
1543 if (is_named_call_p (fndecl, "malloc", call, 1)
1544 || is_named_call_p (fndecl, "calloc", call, 2)
1545 || is_std_named_call_p (fndecl, "malloc", call, 1)
1546 || is_std_named_call_p (fndecl, "calloc", call, 2)
1547 || is_named_call_p (fndecl, "strdup", call, 1)
1548 || is_named_call_p (fndecl, "strndup", call, 2))
1549 return true;
1550
1551 /* ... or it is a builtin allocator that allocates objects freed with
1552 __builtin_free. */
1553 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1554 switch (DECL_FUNCTION_CODE (fndecl))
1555 {
1556 case BUILT_IN_MALLOC:
1557 case BUILT_IN_CALLOC:
1558 case BUILT_IN_STRDUP:
1559 case BUILT_IN_STRNDUP:
1560 return true;
1561 default:
1562 break;
1563 }
1564
1565 return false;
1566 }
1567
1568 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1569
1570 bool
1571 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1572 const supernode *node,
1573 const gimple *stmt) const
1574 {
1575 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1576 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1577 {
1578 if (known_allocator_p (callee_fndecl, call))
1579 {
1580 on_allocator_call (sm_ctxt, call, &m_free);
1581 return true;
1582 }
1583
1584 if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1585 on_allocator_call (sm_ctxt, call, &m_scalar_delete);
1586 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1587 on_allocator_call (sm_ctxt, call, &m_vector_delete);
1588 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1589 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1590 {
1591 on_deallocator_call (sm_ctxt, node, call,
1592 &m_scalar_delete.m_deallocator, 0);
1593 return true;
1594 }
1595 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1596 {
1597 on_deallocator_call (sm_ctxt, node, call,
1598 &m_vector_delete.m_deallocator, 0);
1599 return true;
1600 }
1601
1602 if (is_named_call_p (callee_fndecl, "alloca", call, 1)
1603 || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
1604 {
1605 tree lhs = gimple_call_lhs (call);
1606 if (lhs)
1607 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1608 return true;
1609 }
1610
1611 if (is_named_call_p (callee_fndecl, "free", call, 1)
1612 || is_std_named_call_p (callee_fndecl, "free", call, 1)
1613 || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
1614 {
1615 on_deallocator_call (sm_ctxt, node, call,
1616 &m_free.m_deallocator, 0);
1617 return true;
1618 }
1619
1620 if (is_named_call_p (callee_fndecl, "realloc", call, 2)
1621 || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
1622 {
1623 on_realloc_call (sm_ctxt, node, call);
1624 return true;
1625 }
1626
1627 if (unaffected_by_call_p (callee_fndecl))
1628 return true;
1629
1630 /* Cast away const-ness for cache-like operations. */
1631 malloc_state_machine *mutable_this
1632 = const_cast <malloc_state_machine *> (this);
1633
1634 /* Handle "__attribute__((malloc(FOO)))". */
1635 if (const deallocator_set *deallocators
1636 = mutable_this->get_or_create_custom_deallocator_set
1637 (callee_fndecl))
1638 {
1639 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
1640 bool returns_nonnull
1641 = lookup_attribute ("returns_nonnull", attrs);
1642 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
1643 }
1644
1645 /* Handle "__attribute__((nonnull))". */
1646 {
1647 tree fntype = TREE_TYPE (callee_fndecl);
1648 bitmap nonnull_args = get_nonnull_args (fntype);
1649 if (nonnull_args)
1650 {
1651 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1652 {
1653 tree arg = gimple_call_arg (stmt, i);
1654 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
1655 continue;
 1656                   /* The nonnull attribute covers either all pointer
 1657                      arguments (empty bitmap) or just the specified ones.  */
1658 if (bitmap_empty_p (nonnull_args)
1659 || bitmap_bit_p (nonnull_args, i))
1660 {
1661 state_t state = sm_ctxt->get_state (stmt, arg);
1662 /* Can't use a switch as the states are non-const. */
1663 if (unchecked_p (state))
1664 {
1665 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1666 sm_ctxt->warn (node, stmt, arg,
1667 new possible_null_arg (*this, diag_arg,
1668 callee_fndecl,
1669 i));
1670 const allocation_state *astate
1671 = as_a_allocation_state (state);
1672 sm_ctxt->set_next_state (stmt, arg,
1673 astate->get_nonnull ());
1674 }
1675 else if (state == m_null)
1676 {
1677 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1678 sm_ctxt->warn (node, stmt, arg,
1679 new null_arg (*this, diag_arg,
1680 callee_fndecl, i));
1681 sm_ctxt->set_next_state (stmt, arg, m_stop);
1682 }
1683 }
1684 }
1685 BITMAP_FREE (nonnull_args);
1686 }
1687 }
1688
1689 /* Check for this after nonnull, so that if we have both
1690 then we transition to "freed", rather than "checked". */
1691 unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
1692 if (dealloc_argno != UINT_MAX)
1693 {
1694 const deallocator *d
1695 = mutable_this->get_or_create_deallocator (callee_fndecl);
1696 on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
1697 }
1698 }
1699
1700 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
1701 if (any_pointer_p (lhs))
 1702         on_zero_assignment (sm_ctxt, stmt, lhs);
1703
1704 /* If we have "LHS = &EXPR;" and EXPR is something other than a MEM_REF,
1705 transition LHS from start to non_heap.
1706 Doing it for ADDR_EXPR(MEM_REF()) is likely wrong, and can lead to
1707 unbounded chains of unmergeable sm-state on pointer arithmetic in loops
1708 when optimization is enabled. */
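         /* e.g. (hypothetical) "q = &some_local;": the ADDR_EXPR operand is
            a VAR_DECL rather than a MEM_REF, so q transitions from start to
            non_heap here.  */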
1709 if (const gassign *assign_stmt = dyn_cast <const gassign *> (stmt))
1710 {
1711 enum tree_code op = gimple_assign_rhs_code (assign_stmt);
1712 if (op == ADDR_EXPR)
1713 {
1714 tree lhs = gimple_assign_lhs (assign_stmt);
1715 if (lhs)
1716 {
1717 tree addr_expr = gimple_assign_rhs1 (assign_stmt);
1718 if (TREE_CODE (TREE_OPERAND (addr_expr, 0)) != MEM_REF)
1719 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1720 }
1721 }
1722 }
1723
1724 /* Handle dereferences. */
1725 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
1726 {
1727 tree op = gimple_op (stmt, i);
1728 if (!op)
1729 continue;
1730 if (TREE_CODE (op) == COMPONENT_REF)
1731 op = TREE_OPERAND (op, 0);
1732
1733 if (TREE_CODE (op) == MEM_REF)
1734 {
1735 tree arg = TREE_OPERAND (op, 0);
1736
1737 state_t state = sm_ctxt->get_state (stmt, arg);
1738 if (unchecked_p (state))
1739 {
1740 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1741 sm_ctxt->warn (node, stmt, arg,
1742 new possible_null_deref (*this, diag_arg));
1743 const allocation_state *astate = as_a_allocation_state (state);
1744 sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
1745 }
1746 else if (state == m_null)
1747 {
1748 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1749 sm_ctxt->warn (node, stmt, arg,
1750 new null_deref (*this, diag_arg));
1751 sm_ctxt->set_next_state (stmt, arg, m_stop);
1752 }
1753 else if (freed_p (state))
1754 {
1755 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1756 const allocation_state *astate = as_a_allocation_state (state);
1757 sm_ctxt->warn (node, stmt, arg,
1758 new use_after_free (*this, diag_arg,
1759 astate->m_deallocator));
1760 sm_ctxt->set_next_state (stmt, arg, m_stop);
1761 }
1762 }
1763 }
1764 return false;
1765 }
1766
1767 /* Handle a call to an allocator.
1768 RETURNS_NONNULL is true if CALL is to a fndecl known to have
1769 __attribute__((returns_nonnull)). */
1770
1771 void
1772 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
1773 const gcall *call,
1774 const deallocator_set *deallocators,
1775 bool returns_nonnull) const
1776 {
1777 tree lhs = gimple_call_lhs (call);
1778 if (lhs)
1779 {
1780 if (sm_ctxt->get_state (call, lhs) == m_start)
1781 sm_ctxt->set_next_state (call, lhs,
1782 (returns_nonnull
1783 ? deallocators->m_nonnull
1784 : deallocators->m_unchecked));
1785 }
1786 else
1787 {
1788 /* TODO: report leak. */
1789 }
1790 }
1791
1792 void
1793 malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
1794 const supernode *node,
1795 const gcall *call,
1796 const deallocator *d,
1797 unsigned argno) const
1798 {
1799 if (argno >= gimple_call_num_args (call))
1800 return;
1801 tree arg = gimple_call_arg (call, argno);
1802
1803 state_t state = sm_ctxt->get_state (call, arg);
1804
1805 /* start/unchecked/nonnull -> freed. */
1806 if (state == m_start)
1807 sm_ctxt->set_next_state (call, arg, d->m_freed);
1808 else if (unchecked_p (state) || nonnull_p (state))
1809 {
1810 const allocation_state *astate = as_a_allocation_state (state);
1811 gcc_assert (astate->m_deallocators);
1812 if (!astate->m_deallocators->contains_p (d))
1813 {
1814 /* Wrong allocator. */
1815 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1816 pending_diagnostic *pd
1817 = new mismatching_deallocation (*this, diag_arg,
1818 astate->m_deallocators,
1819 d);
1820 sm_ctxt->warn (node, call, arg, pd);
1821 }
1822 sm_ctxt->set_next_state (call, arg, d->m_freed);
1823 }
1824
1825 /* Keep state "null" as-is, rather than transitioning to "freed";
1826 we don't want to complain about double-free of NULL. */
1827 else if (state == d->m_freed)
1828 {
1829 /* freed -> stop, with warning. */
1830 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1831 sm_ctxt->warn (node, call, arg,
1832 new double_free (*this, diag_arg, d->m_name));
1833 sm_ctxt->set_next_state (call, arg, m_stop);
1834 }
1835 else if (state == m_non_heap)
1836 {
1837 /* non-heap -> stop, with warning. */
1838 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1839 sm_ctxt->warn (node, call, arg,
1840 new free_of_non_heap (*this, diag_arg,
1841 d->m_name));
1842 sm_ctxt->set_next_state (call, arg, m_stop);
1843 }
1844 }
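
/* Sketch of the transitions handled above (hypothetical code, not from the
   testsuite):

     char *p = (char *) malloc (16);  <-- p: start -> unchecked ({free})
     delete p;    <-- mismatching_deallocation: expected free, not delete

     char *q = (char *) malloc (16);
     free (q);    <-- q: -> freed
     free (q);    <-- double_free  */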
1845
1846 /* Handle a call to "realloc".
1847 Check for free of non-heap or mismatching allocators,
1848 transitioning to the "stop" state for such cases.
1849
1850 Otherwise, region_model::impl_call_realloc will later
1851 get called (which will handle other sm-state transitions
1852 when the state is bifurcated). */
1853
1854 void
1855 malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
1856 const supernode *node,
1857 const gcall *call) const
1858 {
1859 const unsigned argno = 0;
1860 const deallocator *d = &m_realloc;
1861
1862 tree arg = gimple_call_arg (call, argno);
1863
1864 state_t state = sm_ctxt->get_state (call, arg);
1865
1866 if (unchecked_p (state) || nonnull_p (state))
1867 {
1868 const allocation_state *astate = as_a_allocation_state (state);
1869 gcc_assert (astate->m_deallocators);
1870 if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
1871 {
1872 /* Wrong allocator. */
1873 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1874 pending_diagnostic *pd
1875 = new mismatching_deallocation (*this, diag_arg,
1876 astate->m_deallocators,
1877 d);
1878 sm_ctxt->warn (node, call, arg, pd);
1879 sm_ctxt->set_next_state (call, arg, m_stop);
1880 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1881 path_ctxt->terminate_path ();
1882 }
1883 }
1884 else if (state == m_free.m_deallocator.m_freed)
1885 {
1886 /* freed -> stop, with warning. */
1887 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1888 sm_ctxt->warn (node, call, arg,
1889 new double_free (*this, diag_arg, "free"));
1890 sm_ctxt->set_next_state (call, arg, m_stop);
1891 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1892 path_ctxt->terminate_path ();
1893 }
1894 else if (state == m_non_heap)
1895 {
1896 /* non-heap -> stop, with warning. */
1897 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1898 sm_ctxt->warn (node, call, arg,
1899 new free_of_non_heap (*this, diag_arg,
1900 d->m_name));
1901 sm_ctxt->set_next_state (call, arg, m_stop);
1902 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
1903 path_ctxt->terminate_path ();
1904 }
1905 }
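
/* Cases rejected outright here (hypothetical code):

     char *p = new char[16];
     p = (char *) realloc (p, 32);  <-- mismatching_deallocation:
                                        p expects delete[], not free/realloc

     char buf[16];
     char *q = (char *) realloc (buf, 32);  <-- free_of_non_heap

   The success/failure outcomes of a well-formed realloc call are modelled
   later, when region_model::impl_call_realloc bifurcates the state.  */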
1906
1907 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
1908
1909 void
1910 malloc_state_machine::on_phi (sm_context *sm_ctxt,
1911 const supernode *node ATTRIBUTE_UNUSED,
1912 const gphi *phi,
1913 tree rhs) const
1914 {
1915 if (zerop (rhs))
1916 {
1917 tree lhs = gimple_phi_result (phi);
1918 on_zero_assignment (sm_ctxt, phi, lhs);
1919 }
1920 }
1921
1922 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
1923 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
1924
1925 void
1926 malloc_state_machine::on_condition (sm_context *sm_ctxt,
1927 const supernode *node ATTRIBUTE_UNUSED,
1928 const gimple *stmt,
1929 const svalue *lhs,
1930 enum tree_code op,
1931 const svalue *rhs) const
1932 {
1933 if (!rhs->all_zeroes_p ())
1934 return;
1935
1936 if (!any_pointer_p (lhs))
1937 return;
1938 if (!any_pointer_p (rhs))
1939 return;
1940
1941 if (op == NE_EXPR)
1942 {
1943 log ("got 'ARG != 0' match");
1944 state_t s = sm_ctxt->get_state (stmt, lhs);
1945 if (unchecked_p (s))
1946 {
1947 const allocation_state *astate = as_a_allocation_state (s);
1948 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
1949 }
1950 }
1951 else if (op == EQ_EXPR)
1952 {
1953 log ("got 'ARG == 0' match");
1954 state_t s = sm_ctxt->get_state (stmt, lhs);
1955 if (unchecked_p (s))
1956 sm_ctxt->set_next_state (stmt, lhs, m_null);
1957 }
1958 }
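
/* i.e. the canonical null-check (hypothetical code):

     void *p = malloc (sz);    <-- p: unchecked
     if (p != NULL)
       ...                     <-- on this path: p: unchecked -> nonnull
     else
       ...                     <-- on this path: p: unchecked -> null  */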
1959
1960 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
1961 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
1962 (to avoid false leak reports). */
1963
1964 bool
1965 malloc_state_machine::can_purge_p (state_t s) const
1966 {
1967 enum resource_state rs = get_rs (s);
1968 return rs != RS_UNCHECKED && rs != RS_NONNULL;
1969 }
1970
1971 /* Implementation of state_machine::on_leak vfunc for malloc_state_machine
1972 (for complaining about leaks of pointers in state 'unchecked' and
1973 'nonnull'). */
1974
1975 pending_diagnostic *
1976 malloc_state_machine::on_leak (tree var) const
1977 {
1978 return new malloc_leak (*this, var);
1979 }
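
/* Together, can_purge_p and on_leak give, e.g. (hypothetical code):

     void g (void)
     {
       void *p = malloc (16);  <-- p: unchecked; cannot be purged
     }                         <-- p still unchecked when it goes out of
                                   scope: malloc_leak reported  */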
1980
1981 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
1982 for malloc_state_machine. */
1983
1984 bool
1985 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
1986 bool is_mutable) const
1987 {
1988 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
1989 unknown fn. */
1990 if (s == m_non_heap)
1991 return false;
1992
1993 /* Otherwise, pointers passed as non-const can be freed. */
1994 return is_mutable;
1995 }
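
/* For example (unknown_fn is hypothetical):

     void *p = malloc (16);
     unknown_fn (p);          <-- p's sm-state is reset: the callee may
                                  have freed the pointer, so no leak or
                                  double-free is reported later

     char buf[16];
     unknown_fn (buf);        <-- "non-heap" state is kept
     free (buf);              <-- free_of_non_heap still reported  */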
1996
1997 /* Return true if calls to FNDECL are known to not affect this sm-state. */
1998
1999 bool
2000 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2001 {
2002 /* A set of functions that are known to not affect allocation
2003 status, even if we haven't fully modelled the rest of their
2004 behavior yet. */
2005 static const char * const funcnames[] = {
2006 /* This array must be kept sorted. */
2007 "strsep",
2008 };
2009 const size_t count
2010 = sizeof (funcnames) / sizeof (funcnames[0]);
2011 function_set fs (funcnames, count);
2012
2013 if (fs.contains_decl_p (fndecl))
2014 return true;
2015
2016 return false;
2017 }
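
/* e.g. (hypothetical code) in:

     char *buf = strdup (line);        <-- buf: unchecked ({free})
     char *cursor = buf;
     char *tok = strsep (&cursor, ":");

   the call to strsep does not reset buf's sm-state, so the analyzer still
   tracks that buf needs to be freed (and would report a malloc_leak if it
   never is).  */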
2018
2019 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2020 assign zero to LHS. */
2021
2022 void
2023 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2024 const gimple *stmt,
2025 tree lhs) const
2026 {
2027 state_t s = sm_ctxt->get_state (stmt, lhs);
2028 enum resource_state rs = get_rs (s);
2029 if (rs == RS_START
2030 || rs == RS_UNCHECKED
2031 || rs == RS_NONNULL
2032 || rs == RS_FREED)
2033 sm_ctxt->set_next_state (stmt, lhs, m_null);
2034 }
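
/* e.g. (hypothetical code):

     char *p = (char *) malloc (16);
     free (p);      <-- p: -> freed
     p = NULL;      <-- p: freed -> null
     free (p);      <-- no double_free: the "null" state is kept as-is  */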
2035
2036 /* Special-case hook for handling realloc, for the "success with move to
2037 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2038 non-null.
2039
2040 This is similar to on_deallocator_call and on_allocator_call,
2041 but the checks happen in on_realloc_call, and the transitions are applied here when region_model::impl_call_realloc splits ("bifurcates") the states. */
2042
2043 void
2044 malloc_state_machine::
2045 on_realloc_with_move (region_model *model,
2046 sm_state_map *smap,
2047 const svalue *old_ptr_sval,
2048 const svalue *new_ptr_sval,
2049 const extrinsic_state &ext_state) const
2050 {
2051 smap->set_state (model, old_ptr_sval,
2052 m_free.m_deallocator.m_freed,
2053 NULL, ext_state);
2054
2055 smap->set_state (model, new_ptr_sval,
2056 m_free.m_nonnull,
2057 NULL, ext_state);
2058 }
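
/* e.g. (hypothetical code) for

     void *q = realloc (p, n);

   on the "success with move" path OLD_PTR_SVAL (p's old value) becomes
   "freed" and NEW_PTR_SVAL (q's value) becomes "nonnull", so a later use
   of p on that path is reported as use_after_free.  */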
2059
2060 } // anonymous namespace
2061
2062 /* Internal interface to this file. */
2063
2064 state_machine *
2065 make_malloc_state_machine (logger *logger)
2066 {
2067 return new malloc_state_machine (logger);
2068 }
2069
2070 /* Special-case hook for handling realloc, for use by
2071 region_model::impl_call_realloc::success_with_move::update_model. */
2072
2073 void
2074 region_model::on_realloc_with_move (const call_details &cd,
2075 const svalue *old_ptr_sval,
2076 const svalue *new_ptr_sval)
2077 {
2078 region_model_context *ctxt = cd.get_ctxt ();
2079 if (!ctxt)
2080 return;
2081 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2082 if (!ext_state)
2083 return;
2084
2085 sm_state_map *smap;
2086 const state_machine *sm;
2087 unsigned sm_idx;
2088 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2089 return;
2090
2091 gcc_assert (smap);
2092 gcc_assert (sm);
2093
2094 const malloc_state_machine &malloc_sm
2095 = (const malloc_state_machine &)*sm;
2096
2097 malloc_sm.on_realloc_with_move (this,
2098 smap,
2099 old_ptr_sval,
2100 new_ptr_sval,
2101 *ext_state);
2102 }
2103
2104 } // namespace ana
2105
2106 #endif /* #if ENABLE_ANALYZER */