1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "options.h"
31 #include "bitmap.h"
32 #include "diagnostic-path.h"
33 #include "diagnostic-metadata.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "analyzer/call-string.h"
40 #include "analyzer/program-point.h"
41 #include "analyzer/store.h"
42 #include "analyzer/region-model.h"
43 #include "analyzer/call-details.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "analyzer/function-set.h"
47 #include "analyzer/program-state.h"
48
49 #if ENABLE_ANALYZER
50
51 namespace ana {
52
53 namespace {
54
55 /* This state machine and its various support classes track allocations
56 and deallocations.
57
58 It has a few standard allocation/deallocation pairs (e.g. new/delete),
59 and also supports user-defined ones via
60 __attribute__ ((malloc(DEALLOCATOR))).
61
62 There can be more than one valid deallocator for a given allocator,
63 for example:
64 __attribute__ ((malloc (fclose)))
65 __attribute__ ((malloc (freopen, 3)))
66 FILE* fopen (const char*, const char*);
67 A deallocator_set represents a particular set of valid deallocators.
68
69 We track the expected deallocator_set for a value, but not the allocation
70 function - there could be more than one allocator per deallocator_set.
71 For example, there could be dozens of allocators for "free" beyond just
72 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
73 of states by tracking individual allocators in the exploded graph;
74 we merely want to track "this value expects to have 'free' called on it".
75 Perhaps we can reconstruct which allocator was used later, when emitting
76 the path, if it's necessary for precision of wording of diagnostics. */
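/* As an illustrative sketch (hypothetical user code, not taken from this
   file), an allocator/deallocator pair declared as:

     void my_free (void *p);
     __attribute__ ((malloc (my_free)))
     void *my_alloc (size_t sz);

   gives values returned by my_alloc a deallocator_set containing just
   "my_free"; passing such a value to any other deallocator is then
   reported as a mismatching deallocation.  */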
77
78 class deallocator;
79 class deallocator_set;
80 class malloc_state_machine;
81
82 /* An enum for discriminating between different kinds of allocation_state. */
83
84 enum resource_state
85 {
86 /* States that are independent of allocator/deallocator. */
87
88 /* The start state. */
89 RS_START,
90
91 /* State for a pointer that's been unconditionally dereferenced. */
92 RS_ASSUMED_NON_NULL,
93
94 /* State for a pointer that's known to be NULL. */
95 RS_NULL,
96
97 /* State for a pointer that's known to not be on the heap (e.g. to a local
98 or global). */
99 RS_NON_HEAP,
100
101 /* Stop state, for pointers we don't want to track any more. */
102 RS_STOP,
103
104 /* States that relate to a specific deallocator_set. */
105
106 /* State for a pointer returned from an allocator that hasn't
107 been checked for NULL.
108 It could be a pointer to heap-allocated memory, or could be NULL. */
109 RS_UNCHECKED,
110
111 /* State for a pointer returned from an allocator,
112 known to be non-NULL. */
113 RS_NONNULL,
114
115 /* State for a pointer passed to a deallocator. */
116 RS_FREED
117 };
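/* A rough sketch of the typical progression for a heap pointer:
   RS_START -> RS_UNCHECKED (returned from an allocator such as malloc)
   -> RS_NONNULL or RS_NULL (once the result has been compared against NULL)
   -> RS_FREED (once passed to a matching deallocator).  */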
118
119 /* Custom state subclass, which can optionally refer to a
120 deallocator_set. */
121
122 struct allocation_state : public state_machine::state
123 {
124 allocation_state (const char *name, unsigned id,
125 enum resource_state rs,
126 const deallocator_set *deallocators,
127 const deallocator *deallocator)
128 : state (name, id), m_rs (rs),
129 m_deallocators (deallocators),
130 m_deallocator (deallocator)
131 {}
132
133 void dump_to_pp (pretty_printer *pp) const override;
134
135 const allocation_state *get_nonnull () const;
136
137 enum resource_state m_rs;
138 const deallocator_set *m_deallocators;
139 const deallocator *m_deallocator;
140 };
141
142 /* Custom state subclass, for the "assumed-non-null" state
143 where the assumption happens in a particular frame. */
144
145 struct assumed_non_null_state : public allocation_state
146 {
147 assumed_non_null_state (const char *name, unsigned id,
148 const frame_region *frame)
149 : allocation_state (name, id, RS_ASSUMED_NON_NULL,
150 NULL, NULL),
151 m_frame (frame)
152 {
153 gcc_assert (m_frame);
154 }
155
156 void dump_to_pp (pretty_printer *pp) const final override;
157
158 const frame_region *m_frame;
159 };
160
161 /* An enum for choosing which wording to use in various diagnostics
162 when describing deallocations. */
163
164 enum wording
165 {
166 WORDING_FREED,
167 WORDING_DELETED,
168 WORDING_DEALLOCATED,
169 WORDING_REALLOCATED
170 };
171
172 /* Base class representing a deallocation function,
173 either a built-in one we know about, or one exposed via
174 __attribute__((malloc(DEALLOCATOR))). */
175
176 struct deallocator
177 {
178 hashval_t hash () const;
179 void dump_to_pp (pretty_printer *pp) const;
180 static int cmp (const deallocator *a, const deallocator *b);
181 static int cmp_ptr_ptr (const void *, const void *);
182
183 /* Name to use in diagnostics. */
184 const char *m_name;
185
186 /* Which wording to use in diagnostics. */
187 enum wording m_wording;
188
189 /* State for a value passed to one of the deallocators. */
190 state_machine::state_t m_freed;
191
192 protected:
193 deallocator (malloc_state_machine *sm,
194 const char *name,
195 enum wording wording);
196 };
197
198 /* Subclass representing a predefined deallocator,
199 e.g. "delete []", without needing a specific FUNCTION_DECL
200 ahead of time. */
201
202 struct standard_deallocator : public deallocator
203 {
204 standard_deallocator (malloc_state_machine *sm,
205 const char *name,
206 enum wording wording);
207 };
208
209 /* Subclass representing a user-defined deallocator
210 via __attribute__((malloc(DEALLOCATOR))) given
211 a specific FUNCTION_DECL. */
212
213 struct custom_deallocator : public deallocator
214 {
215 custom_deallocator (malloc_state_machine *sm,
216 tree deallocator_fndecl,
217 enum wording wording)
218 : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
219 wording)
220 {
221 }
222 };
223
224 /* Base class representing a set of possible deallocators.
225 Often this will be just a single deallocator, but some
226 allocators have multiple valid deallocators (e.g. the result of
227 "fopen" can be closed by either "fclose" or "freopen"). */
228
229 struct deallocator_set
230 {
231 deallocator_set (malloc_state_machine *sm,
232 enum wording wording);
233 virtual ~deallocator_set () {}
234
235 virtual bool contains_p (const deallocator *d) const = 0;
236 virtual const deallocator *maybe_get_single () const = 0;
237 virtual void dump_to_pp (pretty_printer *pp) const = 0;
238 void dump () const;
239
240 /* Which wording to use in diagnostics. */
241 enum wording m_wording;
242
243 /* Pointers to states.
244 These states are owned by the state_machine base class. */
245
246 /* State for an unchecked result from an allocator using this set. */
247 state_machine::state_t m_unchecked;
248
249 /* State for a known non-NULL result from such an allocator. */
250 state_machine::state_t m_nonnull;
251 };
252
253 /* Subclass of deallocator_set representing a set of deallocators
254 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
255
256 struct custom_deallocator_set : public deallocator_set
257 {
258 typedef const auto_vec <const deallocator *> *key_t;
259
260 custom_deallocator_set (malloc_state_machine *sm,
261 const auto_vec <const deallocator *> *vec,
262 //const char *name,
263 //const char *dealloc_funcname,
264 //unsigned arg_idx,
265 enum wording wording);
266
267 bool contains_p (const deallocator *d) const final override;
268 const deallocator *maybe_get_single () const final override;
269 void dump_to_pp (pretty_printer *pp) const final override;
270
271 auto_vec <const deallocator *> m_deallocator_vec;
272 };
273
274 /* Subclass of deallocator_set representing a set of deallocators
275 with a single standard_deallocator, e.g. "delete []". */
276
277 struct standard_deallocator_set : public deallocator_set
278 {
279 standard_deallocator_set (malloc_state_machine *sm,
280 const char *name,
281 enum wording wording);
282
283 bool contains_p (const deallocator *d) const final override;
284 const deallocator *maybe_get_single () const final override;
285 void dump_to_pp (pretty_printer *pp) const final override;
286
287 standard_deallocator m_deallocator;
288 };
289
290 /* Traits class for ensuring uniqueness of deallocator_sets within
291 malloc_state_machine. */
292
293 struct deallocator_set_map_traits
294 {
295 typedef custom_deallocator_set::key_t key_type;
296 typedef custom_deallocator_set *value_type;
297 typedef custom_deallocator_set *compare_type;
298
299 static inline hashval_t hash (const key_type &k)
300 {
301 gcc_assert (k != NULL);
302 gcc_assert (k != reinterpret_cast<key_type> (1));
303
304 hashval_t result = 0;
305 unsigned i;
306 const deallocator *d;
307 FOR_EACH_VEC_ELT (*k, i, d)
308 result ^= d->hash ();
309 return result;
310 }
311 static inline bool equal_keys (const key_type &k1, const key_type &k2)
312 {
313 if (k1->length () != k2->length ())
314 return false;
315
316 for (unsigned i = 0; i < k1->length (); i++)
317 if ((*k1)[i] != (*k2)[i])
318 return false;
319
320 return true;
321 }
322 template <typename T>
323 static inline void remove (T &)
324 {
325 /* empty; the nodes are handled elsewhere. */
326 }
327 template <typename T>
328 static inline void mark_deleted (T &entry)
329 {
330 entry.m_key = reinterpret_cast<key_type> (1);
331 }
332 template <typename T>
333 static inline void mark_empty (T &entry)
334 {
335 entry.m_key = NULL;
336 }
337 template <typename T>
338 static inline bool is_deleted (const T &entry)
339 {
340 return entry.m_key == reinterpret_cast<key_type> (1);
341 }
342 template <typename T>
343 static inline bool is_empty (const T &entry)
344 {
345 return entry.m_key == NULL;
346 }
347 static const bool empty_zero_p = false;
348 };
349
350 /* A state machine for detecting misuses of the malloc/free API.
351
352 See sm-malloc.dot for an overview (keep this in-sync with that file). */
353
354 class malloc_state_machine : public state_machine
355 {
356 public:
357 typedef allocation_state custom_data_t;
358
359 malloc_state_machine (logger *logger);
360 ~malloc_state_machine ();
361
362 state_t
363 add_state (const char *name, enum resource_state rs,
364 const deallocator_set *deallocators,
365 const deallocator *deallocator);
366
367 bool inherited_state_p () const final override { return false; }
368
369 state_machine::state_t
370 get_default_state (const svalue *sval) const final override
371 {
372 if (tree cst = sval->maybe_get_constant ())
373 {
374 if (zerop (cst))
375 return m_null;
376 }
377 if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
378 {
379 const region *reg = ptr->get_pointee ();
380 switch (reg->get_memory_space ())
381 {
382 default:
383 break;
384 case MEMSPACE_CODE:
385 case MEMSPACE_GLOBALS:
386 case MEMSPACE_STACK:
387 case MEMSPACE_READONLY_DATA:
388 return m_non_heap;
389 }
390 }
391 return m_start;
392 }
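/* Illustration: a literal null pointer constant therefore starts in the
   "null" state, a pointer such as "&some_local" or "&some_global" starts
   in "non-heap", and anything else starts in the start state.  */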
393
394 bool on_stmt (sm_context *sm_ctxt,
395 const supernode *node,
396 const gimple *stmt) const final override;
397
398 void on_phi (sm_context *sm_ctxt,
399 const supernode *node,
400 const gphi *phi,
401 tree rhs) const final override;
402
403 void on_condition (sm_context *sm_ctxt,
404 const supernode *node,
405 const gimple *stmt,
406 const svalue *lhs,
407 enum tree_code op,
408 const svalue *rhs) const final override;
409
410 void on_pop_frame (sm_state_map *smap,
411 const frame_region *) const final override;
412
413 bool can_purge_p (state_t s) const final override;
414 std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;
415
416 bool reset_when_passed_to_unknown_fn_p (state_t s,
417 bool is_mutable) const final override;
418
419 state_t
420 maybe_get_merged_states_nonequal (state_t state_a,
421 state_t state_b) const final override;
422
423 static bool unaffected_by_call_p (tree fndecl);
424
425 void maybe_assume_non_null (sm_context *sm_ctxt,
426 tree ptr,
427 const gimple *stmt) const;
428
429 void on_realloc_with_move (region_model *model,
430 sm_state_map *smap,
431 const svalue *old_ptr_sval,
432 const svalue *new_ptr_sval,
433 const extrinsic_state &ext_state) const;
434
435 standard_deallocator_set m_free;
436 standard_deallocator_set m_scalar_delete;
437 standard_deallocator_set m_vector_delete;
438
439 standard_deallocator m_realloc;
440
441 /* States that are independent of the API.
442
443 /* States for a pointer that's been unconditionally dereferenced
444 in a particular stack frame. */
445 hash_map<const frame_region *, state_t> m_assumed_non_null;
446
447 /* State for a pointer that's known to be NULL. */
448 state_t m_null;
449
450 /* State for a pointer that's known to not be on the heap (e.g. to a local
451 or global). */
452 state_t m_non_heap; // TODO: or should this be a different state machine?
453 // or do we need child values etc?
454
455 /* Stop state, for pointers we don't want to track any more. */
456 state_t m_stop;
457
458 private:
459 const custom_deallocator_set *
460 get_or_create_custom_deallocator_set (tree allocator_fndecl);
461 custom_deallocator_set *
462 maybe_create_custom_deallocator_set (tree allocator_fndecl);
463 const deallocator *
464 get_or_create_deallocator (tree deallocator_fndecl);
465
466 state_t
467 get_or_create_assumed_non_null_state_for_frame (const frame_region *frame);
468
469 void
470 maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
471 const supernode *node,
472 const gimple *stmt,
473 const assumed_non_null_state *,
474 tree ptr) const;
475
476 void on_allocator_call (sm_context *sm_ctxt,
477 const gcall *call,
478 const deallocator_set *deallocators,
479 bool returns_nonnull = false) const;
480 void handle_free_of_non_heap (sm_context *sm_ctxt,
481 const supernode *node,
482 const gcall *call,
483 tree arg,
484 const deallocator *d) const;
485 void on_deallocator_call (sm_context *sm_ctxt,
486 const supernode *node,
487 const gcall *call,
488 const deallocator *d,
489 unsigned argno) const;
490 void on_realloc_call (sm_context *sm_ctxt,
491 const supernode *node,
492 const gcall *call) const;
493 void on_zero_assignment (sm_context *sm_ctxt,
494 const gimple *stmt,
495 tree lhs) const;
496
497 /* A map for consolidating deallocators so that they are
498 unique per deallocator FUNCTION_DECL. */
499 typedef hash_map<tree, deallocator *> deallocator_map_t;
500 deallocator_map_t m_deallocator_map;
501
502 /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *. */
503 typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
504 deallocator_set_cache_t m_custom_deallocator_set_cache;
505
506 /* A map for consolidating custom_deallocator_set instances. */
507 typedef hash_map<custom_deallocator_set::key_t,
508 custom_deallocator_set *,
509 deallocator_set_map_traits> custom_deallocator_set_map_t;
510 custom_deallocator_set_map_t m_custom_deallocator_set_map;
511
512 /* Record of dynamically-allocated objects, for cleanup. */
513 auto_vec <custom_deallocator_set *> m_dynamic_sets;
514 auto_vec <custom_deallocator *> m_dynamic_deallocators;
515 };
516
517 /* struct deallocator. */
518
519 deallocator::deallocator (malloc_state_machine *sm,
520 const char *name,
521 enum wording wording)
522 : m_name (name),
523 m_wording (wording),
524 m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
525 {
526 }
527
528 hashval_t
529 deallocator::hash () const
530 {
531 return (hashval_t)m_freed->get_id ();
532 }
533
534 void
535 deallocator::dump_to_pp (pretty_printer *pp) const
536 {
537 pp_printf (pp, "%qs", m_name);
538 }
539
540 int
541 deallocator::cmp (const deallocator *a, const deallocator *b)
542 {
543 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
544 }
545
546 int
547 deallocator::cmp_ptr_ptr (const void *a, const void *b)
548 {
549 return cmp (*(const deallocator * const *)a,
550 *(const deallocator * const *)b);
551 }
552
553
554 /* struct standard_deallocator : public deallocator. */
555
556 standard_deallocator::standard_deallocator (malloc_state_machine *sm,
557 const char *name,
558 enum wording wording)
559 : deallocator (sm, name, wording)
560 {
561 }
562
563 /* struct deallocator_set. */
564
565 deallocator_set::deallocator_set (malloc_state_machine *sm,
566 enum wording wording)
567 : m_wording (wording),
568 m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
569 m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
570 {
571 }
572
573 /* Dump a description of this deallocator_set to stderr. */
574
575 DEBUG_FUNCTION void
576 deallocator_set::dump () const
577 {
578 pretty_printer pp;
579 pp_show_color (&pp) = pp_show_color (global_dc->printer);
580 pp.buffer->stream = stderr;
581 dump_to_pp (&pp);
582 pp_newline (&pp);
583 pp_flush (&pp);
584 }
585
586 /* struct custom_deallocator_set : public deallocator_set. */
587
588 custom_deallocator_set::
589 custom_deallocator_set (malloc_state_machine *sm,
590 const auto_vec <const deallocator *> *vec,
591 enum wording wording)
592 : deallocator_set (sm, wording),
593 m_deallocator_vec (vec->length ())
594 {
595 unsigned i;
596 const deallocator *d;
597 FOR_EACH_VEC_ELT (*vec, i, d)
598 m_deallocator_vec.safe_push (d);
599 }
600
601 bool
602 custom_deallocator_set::contains_p (const deallocator *d) const
603 {
604 unsigned i;
605 const deallocator *cd;
606 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
607 if (cd == d)
608 return true;
609 return false;
610 }
611
612 const deallocator *
613 custom_deallocator_set::maybe_get_single () const
614 {
615 if (m_deallocator_vec.length () == 1)
616 return m_deallocator_vec[0];
617 return NULL;
618 }
619
620 void
621 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
622 {
623 pp_character (pp, '{');
624 unsigned i;
625 const deallocator *d;
626 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
627 {
628 if (i > 0)
629 pp_string (pp, ", ");
630 d->dump_to_pp (pp);
631 }
632 pp_character (pp, '}');
633 }
634
635 /* struct standard_deallocator_set : public deallocator_set. */
636
637 standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
638 const char *name,
639 enum wording wording)
640 : deallocator_set (sm, wording),
641 m_deallocator (sm, name, wording)
642 {
643 }
644
645 bool
646 standard_deallocator_set::contains_p (const deallocator *d) const
647 {
648 return d == &m_deallocator;
649 }
650
651 const deallocator *
652 standard_deallocator_set::maybe_get_single () const
653 {
654 return &m_deallocator;
655 }
656
657 void
658 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
659 {
660 pp_character (pp, '{');
661 pp_string (pp, m_deallocator.m_name);
662 pp_character (pp, '}');
663 }
664
665 /* Return STATE cast to the custom state subclass, or NULL for the start state.
666 Everything should be an allocation_state apart from the start state. */
667
668 static const allocation_state *
669 dyn_cast_allocation_state (state_machine::state_t state)
670 {
671 if (state->get_id () == 0)
672 return NULL;
673 return static_cast <const allocation_state *> (state);
674 }
675
676 /* Return STATE cast to the custom state subclass, for a state that is
677 already known not to be the start state. */
678
679 static const allocation_state *
680 as_a_allocation_state (state_machine::state_t state)
681 {
682 gcc_assert (state->get_id () != 0);
683 return static_cast <const allocation_state *> (state);
684 }
685
686 /* Get the resource_state for STATE. */
687
688 static enum resource_state
689 get_rs (state_machine::state_t state)
690 {
691 if (const allocation_state *astate = dyn_cast_allocation_state (state))
692 return astate->m_rs;
693 else
694 return RS_START;
695 }
696
697 /* Return true if STATE is the start state. */
698
699 static bool
700 start_p (state_machine::state_t state)
701 {
702 return get_rs (state) == RS_START;
703 }
704
705 /* Return true if STATE is an unchecked result from an allocator. */
706
707 static bool
708 unchecked_p (state_machine::state_t state)
709 {
710 return get_rs (state) == RS_UNCHECKED;
711 }
712
713 /* Return true if STATE is a non-null result from an allocator. */
714
715 static bool
716 nonnull_p (state_machine::state_t state)
717 {
718 return get_rs (state) == RS_NONNULL;
719 }
720
721 /* Return true if STATE is a value that has been passed to a deallocator. */
722
723 static bool
724 freed_p (state_machine::state_t state)
725 {
726 return get_rs (state) == RS_FREED;
727 }
728
729 /* Return true if STATE is a value that has been assumed to be non-NULL. */
730
731 static bool
732 assumed_non_null_p (state_machine::state_t state)
733 {
734 return get_rs (state) == RS_ASSUMED_NON_NULL;
735 }
736
737 /* Class for diagnostics relating to malloc_state_machine. */
738
739 class malloc_diagnostic : public pending_diagnostic
740 {
741 public:
742 malloc_diagnostic (const malloc_state_machine &sm, tree arg)
743 : m_sm (sm), m_arg (arg)
744 {}
745
746 bool subclass_equal_p (const pending_diagnostic &base_other) const override
747 {
748 return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
749 }
750
751 label_text describe_state_change (const evdesc::state_change &change)
752 override
753 {
754 if (change.m_old_state == m_sm.get_start_state ()
755 && unchecked_p (change.m_new_state))
756 // TODO: verify that it's the allocation stmt, not a copy
757 return label_text::borrow ("allocated here");
758 if (unchecked_p (change.m_old_state)
759 && nonnull_p (change.m_new_state))
760 {
761 if (change.m_expr)
762 return change.formatted_print ("assuming %qE is non-NULL",
763 change.m_expr);
764 else
765 return change.formatted_print ("assuming %qs is non-NULL",
766 "<unknown>");
767 }
768 if (change.m_new_state == m_sm.m_null)
769 {
770 if (unchecked_p (change.m_old_state))
771 {
772 if (change.m_expr)
773 return change.formatted_print ("assuming %qE is NULL",
774 change.m_expr);
775 else
776 return change.formatted_print ("assuming %qs is NULL",
777 "<unknown>");
778 }
779 else
780 {
781 if (change.m_expr)
782 return change.formatted_print ("%qE is NULL",
783 change.m_expr);
784 else
785 return change.formatted_print ("%qs is NULL",
786 "<unknown>");
787 }
788 }
789
790 return label_text ();
791 }
792
793 diagnostic_event::meaning
794 get_meaning_for_state_change (const evdesc::state_change &change)
795 const final override
796 {
797 if (change.m_old_state == m_sm.get_start_state ()
798 && unchecked_p (change.m_new_state))
799 return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
800 diagnostic_event::NOUN_memory);
801 if (freed_p (change.m_new_state))
802 return diagnostic_event::meaning (diagnostic_event::VERB_release,
803 diagnostic_event::NOUN_memory);
804 return diagnostic_event::meaning ();
805 }
806
807 protected:
808 const malloc_state_machine &m_sm;
809 tree m_arg;
810 };
811
812 /* Concrete subclass for reporting mismatching allocator/deallocator
813 diagnostics. */
814
815 class mismatching_deallocation : public malloc_diagnostic
816 {
817 public:
818 mismatching_deallocation (const malloc_state_machine &sm, tree arg,
819 const deallocator_set *expected_deallocators,
820 const deallocator *actual_dealloc)
821 : malloc_diagnostic (sm, arg),
822 m_expected_deallocators (expected_deallocators),
823 m_actual_dealloc (actual_dealloc)
824 {}
825
826 const char *get_kind () const final override
827 {
828 return "mismatching_deallocation";
829 }
830
831 int get_controlling_option () const final override
832 {
833 return OPT_Wanalyzer_mismatching_deallocation;
834 }
835
836 bool emit (rich_location *rich_loc) final override
837 {
838 auto_diagnostic_group d;
839 diagnostic_metadata m;
840 m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines. */
841 if (const deallocator *expected_dealloc
842 = m_expected_deallocators->maybe_get_single ())
843 return warning_meta (rich_loc, m, get_controlling_option (),
844 "%qE should have been deallocated with %qs"
845 " but was deallocated with %qs",
846 m_arg, expected_dealloc->m_name,
847 m_actual_dealloc->m_name);
848 else
849 return warning_meta (rich_loc, m, get_controlling_option (),
850 "%qs called on %qE returned from a mismatched"
851 " allocation function",
852 m_actual_dealloc->m_name, m_arg);
853 }
854
855 label_text describe_state_change (const evdesc::state_change &change)
856 final override
857 {
858 if (unchecked_p (change.m_new_state))
859 {
860 m_alloc_event = change.m_event_id;
861 if (const deallocator *expected_dealloc
862 = m_expected_deallocators->maybe_get_single ())
863 return change.formatted_print ("allocated here"
864 " (expects deallocation with %qs)",
865 expected_dealloc->m_name);
866 else
867 return change.formatted_print ("allocated here");
868 }
869 return malloc_diagnostic::describe_state_change (change);
870 }
871
872 label_text describe_final_event (const evdesc::final_event &ev) final override
873 {
874 if (m_alloc_event.known_p ())
875 {
876 if (const deallocator *expected_dealloc
877 = m_expected_deallocators->maybe_get_single ())
878 return ev.formatted_print
879 ("deallocated with %qs here;"
880 " allocation at %@ expects deallocation with %qs",
881 m_actual_dealloc->m_name, &m_alloc_event,
882 expected_dealloc->m_name);
883 else
884 return ev.formatted_print
885 ("deallocated with %qs here;"
886 " allocated at %@",
887 m_actual_dealloc->m_name, &m_alloc_event);
888 }
889 return ev.formatted_print ("deallocated with %qs here",
890 m_actual_dealloc->m_name);
891 }
892
893 private:
894 diagnostic_event_id_t m_alloc_event;
895 const deallocator_set *m_expected_deallocators;
896 const deallocator *m_actual_dealloc;
897 };
898
899 /* Concrete subclass for reporting double-free diagnostics. */
900
901 class double_free : public malloc_diagnostic
902 {
903 public:
904 double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
905 : malloc_diagnostic (sm, arg), m_funcname (funcname)
906 {}
907
908 const char *get_kind () const final override { return "double_free"; }
909
910 int get_controlling_option () const final override
911 {
912 return OPT_Wanalyzer_double_free;
913 }
914
915 bool emit (rich_location *rich_loc) final override
916 {
917 auto_diagnostic_group d;
918 diagnostic_metadata m;
919 m.add_cwe (415); /* CWE-415: Double Free. */
920 return warning_meta (rich_loc, m, get_controlling_option (),
921 "double-%qs of %qE", m_funcname, m_arg);
922 }
923
924 label_text describe_state_change (const evdesc::state_change &change)
925 final override
926 {
927 if (freed_p (change.m_new_state))
928 {
929 m_first_free_event = change.m_event_id;
930 return change.formatted_print ("first %qs here", m_funcname);
931 }
932 return malloc_diagnostic::describe_state_change (change);
933 }
934
935 label_text describe_call_with_state (const evdesc::call_with_state &info)
936 final override
937 {
938 if (freed_p (info.m_state))
939 return info.formatted_print
940 ("passing freed pointer %qE in call to %qE from %qE",
941 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
942 return label_text ();
943 }
944
945 label_text describe_final_event (const evdesc::final_event &ev) final override
946 {
947 if (m_first_free_event.known_p ())
948 return ev.formatted_print ("second %qs here; first %qs was at %@",
949 m_funcname, m_funcname,
950 &m_first_free_event);
951 return ev.formatted_print ("second %qs here", m_funcname);
952 }
953
954 private:
955 diagnostic_event_id_t m_first_free_event;
956 const char *m_funcname;
957 };
958
959 /* Abstract subclass for describing possible bad uses of NULL.
960 Responsible for describing the call that could return NULL. */
961
962 class possible_null : public malloc_diagnostic
963 {
964 public:
965 possible_null (const malloc_state_machine &sm, tree arg)
966 : malloc_diagnostic (sm, arg)
967 {}
968
969 label_text describe_state_change (const evdesc::state_change &change)
970 final override
971 {
972 if (change.m_old_state == m_sm.get_start_state ()
973 && unchecked_p (change.m_new_state))
974 {
975 m_origin_of_unchecked_event = change.m_event_id;
976 return label_text::borrow ("this call could return NULL");
977 }
978 return malloc_diagnostic::describe_state_change (change);
979 }
980
981 label_text describe_return_of_state (const evdesc::return_of_state &info)
982 final override
983 {
984 if (unchecked_p (info.m_state))
985 return info.formatted_print ("possible return of NULL to %qE from %qE",
986 info.m_caller_fndecl, info.m_callee_fndecl);
987 return label_text ();
988 }
989
990 protected:
991 diagnostic_event_id_t m_origin_of_unchecked_event;
992 };
993
994 /* Concrete subclass for describing dereference of a possible NULL
995 value. */
996
997 class possible_null_deref : public possible_null
998 {
999 public:
1000 possible_null_deref (const malloc_state_machine &sm, tree arg)
1001 : possible_null (sm, arg)
1002 {}
1003
1004 const char *get_kind () const final override { return "possible_null_deref"; }
1005
1006 int get_controlling_option () const final override
1007 {
1008 return OPT_Wanalyzer_possible_null_dereference;
1009 }
1010
1011 bool emit (rich_location *rich_loc) final override
1012 {
1013 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1014 diagnostic_metadata m;
1015 m.add_cwe (690);
1016 return warning_meta (rich_loc, m, get_controlling_option (),
1017 "dereference of possibly-NULL %qE", m_arg);
1018 }
1019
1020 label_text describe_final_event (const evdesc::final_event &ev) final override
1021 {
1022 if (m_origin_of_unchecked_event.known_p ())
1023 return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
1024 ev.m_expr,
1025 &m_origin_of_unchecked_event);
1026 else
1027 return ev.formatted_print ("%qE could be NULL", ev.m_expr);
1028 }
1029
1030 };
1031
1032 /* Return true if FNDECL is a C++ method. */
1033
1034 static bool
1035 method_p (tree fndecl)
1036 {
1037 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
1038 }
1039
1040 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
1041 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
1042 as called from cp_printer). */
1043
1044 static label_text
1045 describe_argument_index (tree fndecl, int arg_idx)
1046 {
1047 if (method_p (fndecl))
1048 if (arg_idx == 0)
1049 return label_text::borrow ("'this'");
1050 pretty_printer pp;
1051 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
1052 return label_text::take (xstrdup (pp_formatted_text (&pp)));
1053 }
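/* For example (hypothetical signatures): for "void foo (int *p)" the
   pointer argument is described as "1", whereas for a C++ method
   "void s::f (int *p)" the implicit object argument is described as
   "'this'" and the explicit argument p as "1".  */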
1054
1055 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
1056 Issue a note informing that the pertinent argument must be non-NULL. */
1057
1058 static void
1059 inform_nonnull_attribute (tree fndecl, int arg_idx)
1060 {
1061 label_text arg_desc = describe_argument_index (fndecl, arg_idx);
1062 inform (DECL_SOURCE_LOCATION (fndecl),
1063 "argument %s of %qD must be non-null",
1064 arg_desc.get (), fndecl);
1065 /* Ideally we would use the location of the parm and underline the
1066 attribute also - but we don't have the location_t values at this point
1067 in the middle-end.
1068 For reference, the C and C++ FEs have get_fndecl_argument_location. */
1069 }
1070
1071 /* Concrete subclass for describing passing a possibly-NULL value to a
1072 function marked with __attribute__((nonnull)). */
1073
1074 class possible_null_arg : public possible_null
1075 {
1076 public:
1077 possible_null_arg (const malloc_state_machine &sm, tree arg,
1078 tree fndecl, int arg_idx)
1079 : possible_null (sm, arg),
1080 m_fndecl (fndecl), m_arg_idx (arg_idx)
1081 {}
1082
1083 const char *get_kind () const final override { return "possible_null_arg"; }
1084
1085 bool subclass_equal_p (const pending_diagnostic &base_other)
1086 const final override
1087 {
1088 const possible_null_arg &sub_other
1089 = (const possible_null_arg &)base_other;
1090 return (same_tree_p (m_arg, sub_other.m_arg)
1091 && m_fndecl == sub_other.m_fndecl
1092 && m_arg_idx == sub_other.m_arg_idx);
1093 }
1094
1095 int get_controlling_option () const final override
1096 {
1097 return OPT_Wanalyzer_possible_null_argument;
1098 }
1099
1100 bool emit (rich_location *rich_loc) final override
1101 {
1102 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
1103 auto_diagnostic_group d;
1104 diagnostic_metadata m;
1105 m.add_cwe (690);
1106 bool warned
1107 = warning_meta (rich_loc, m, get_controlling_option (),
1108 "use of possibly-NULL %qE where non-null expected",
1109 m_arg);
1110 if (warned)
1111 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1112 return warned;
1113 }
1114
1115 label_text describe_final_event (const evdesc::final_event &ev) final override
1116 {
1117 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1118 label_text result;
1119 if (m_origin_of_unchecked_event.known_p ())
1120 result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
1121 " where non-null expected",
1122 arg_desc.get (), ev.m_expr,
1123 &m_origin_of_unchecked_event);
1124 else
1125 result = ev.formatted_print ("argument %s (%qE) could be NULL"
1126 " where non-null expected",
1127 arg_desc.get (), ev.m_expr);
1128 return result;
1129 }
1130
1131 private:
1132 tree m_fndecl;
1133 int m_arg_idx;
1134 };
1135
1136 /* Concrete subclass for describing a dereference of a NULL value. */
1137
1138 class null_deref : public malloc_diagnostic
1139 {
1140 public:
1141 null_deref (const malloc_state_machine &sm, tree arg)
1142 : malloc_diagnostic (sm, arg) {}
1143
1144 const char *get_kind () const final override { return "null_deref"; }
1145
1146 int get_controlling_option () const final override
1147 {
1148 return OPT_Wanalyzer_null_dereference;
1149 }
1150
1151 bool emit (rich_location *rich_loc) final override
1152 {
1153 /* CWE-476: NULL Pointer Dereference. */
1154 diagnostic_metadata m;
1155 m.add_cwe (476);
1156 return warning_meta (rich_loc, m, get_controlling_option (),
1157 "dereference of NULL %qE", m_arg);
1158 }
1159
1160 label_text describe_return_of_state (const evdesc::return_of_state &info)
1161 final override
1162 {
1163 if (info.m_state == m_sm.m_null)
1164 return info.formatted_print ("return of NULL to %qE from %qE",
1165 info.m_caller_fndecl, info.m_callee_fndecl);
1166 return label_text ();
1167 }
1168
1169 label_text describe_final_event (const evdesc::final_event &ev) final override
1170 {
1171 return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
1172 }
1173 };
1174
1175 /* Concrete subclass for describing passing a NULL value to a
1176 function marked with __attribute__((nonnull)). */
1177
1178 class null_arg : public malloc_diagnostic
1179 {
1180 public:
1181 null_arg (const malloc_state_machine &sm, tree arg,
1182 tree fndecl, int arg_idx)
1183 : malloc_diagnostic (sm, arg),
1184 m_fndecl (fndecl), m_arg_idx (arg_idx)
1185 {}
1186
1187 const char *get_kind () const final override { return "null_arg"; }
1188
1189 bool subclass_equal_p (const pending_diagnostic &base_other)
1190 const final override
1191 {
1192 const null_arg &sub_other
1193 = (const null_arg &)base_other;
1194 return (same_tree_p (m_arg, sub_other.m_arg)
1195 && m_fndecl == sub_other.m_fndecl
1196 && m_arg_idx == sub_other.m_arg_idx);
1197 }
1198
1199 int get_controlling_option () const final override
1200 {
1201 return OPT_Wanalyzer_null_argument;
1202 }
1203
1204 bool emit (rich_location *rich_loc) final override
1205 {
1206 /* CWE-476: NULL Pointer Dereference. */
1207 auto_diagnostic_group d;
1208 diagnostic_metadata m;
1209 m.add_cwe (476);
1210
1211 bool warned;
1212 if (zerop (m_arg))
1213 warned = warning_meta (rich_loc, m, get_controlling_option (),
1214 "use of NULL where non-null expected");
1215 else
1216 warned = warning_meta (rich_loc, m, get_controlling_option (),
1217 "use of NULL %qE where non-null expected",
1218 m_arg);
1219 if (warned)
1220 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1221 return warned;
1222 }
1223
1224 label_text describe_final_event (const evdesc::final_event &ev) final override
1225 {
1226 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1227 label_text result;
1228 if (zerop (ev.m_expr))
1229 result = ev.formatted_print ("argument %s NULL where non-null expected",
1230 arg_desc.get ());
1231 else
1232 result = ev.formatted_print ("argument %s (%qE) NULL"
1233 " where non-null expected",
1234 arg_desc.get (), ev.m_expr);
1235 return result;
1236 }
1237
1238 private:
1239 tree m_fndecl;
1240 int m_arg_idx;
1241 };
1242
1243 class use_after_free : public malloc_diagnostic
1244 {
1245 public:
1246 use_after_free (const malloc_state_machine &sm, tree arg,
1247 const deallocator *deallocator)
1248 : malloc_diagnostic (sm, arg),
1249 m_deallocator (deallocator)
1250 {
1251 gcc_assert (deallocator);
1252 }
1253
1254 const char *get_kind () const final override { return "use_after_free"; }
1255
1256 int get_controlling_option () const final override
1257 {
1258 return OPT_Wanalyzer_use_after_free;
1259 }
1260
1261 bool emit (rich_location *rich_loc) final override
1262 {
1263 /* CWE-416: Use After Free. */
1264 diagnostic_metadata m;
1265 m.add_cwe (416);
1266 return warning_meta (rich_loc, m, get_controlling_option (),
1267 "use after %<%s%> of %qE",
1268 m_deallocator->m_name, m_arg);
1269 }
1270
1271 label_text describe_state_change (const evdesc::state_change &change)
1272 final override
1273 {
1274 if (freed_p (change.m_new_state))
1275 {
1276 m_free_event = change.m_event_id;
1277 switch (m_deallocator->m_wording)
1278 {
1279 default:
1280 case WORDING_REALLOCATED:
1281 gcc_unreachable ();
1282 case WORDING_FREED:
1283 return label_text::borrow ("freed here");
1284 case WORDING_DELETED:
1285 return label_text::borrow ("deleted here");
1286 case WORDING_DEALLOCATED:
1287 return label_text::borrow ("deallocated here");
1288 }
1289 }
1290 return malloc_diagnostic::describe_state_change (change);
1291 }
1292
1293 label_text describe_final_event (const evdesc::final_event &ev) final override
1294 {
1295 const char *funcname = m_deallocator->m_name;
1296 if (m_free_event.known_p ())
1297 switch (m_deallocator->m_wording)
1298 {
1299 default:
1300 case WORDING_REALLOCATED:
1301 gcc_unreachable ();
1302 case WORDING_FREED:
1303 return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
1304 funcname, ev.m_expr, &m_free_event);
1305 case WORDING_DELETED:
1306 return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
1307 funcname, ev.m_expr, &m_free_event);
1308 case WORDING_DEALLOCATED:
1309 return ev.formatted_print ("use after %<%s%> of %qE;"
1310 " deallocated at %@",
1311 funcname, ev.m_expr, &m_free_event);
1312 }
1313 else
1314 return ev.formatted_print ("use after %<%s%> of %qE",
1315 funcname, ev.m_expr);
1316 }
1317
1318 /* Implementation of pending_diagnostic::supercedes_p for
1319 use_after_free.
1320
1321 We want use-after-free to supercede use-of-uninitialized-value,
1322 so that if we have these at the same stmt, we don't emit
1323 a use-of-uninitialized, just the use-after-free.
1324 (this is because we fully purge information about freed
1325 buffers when we free them to avoid state explosions, so
1326 that if they are accessed after the free, it looks like
1327 they are uninitialized). */
1328
1329 bool supercedes_p (const pending_diagnostic &other) const final override
1330 {
1331 if (other.use_of_uninit_p ())
1332 return true;
1333
1334 return false;
1335 }
1336
1337 private:
1338 diagnostic_event_id_t m_free_event;
1339 const deallocator *m_deallocator;
1340 };
1341
1342 class malloc_leak : public malloc_diagnostic
1343 {
1344 public:
1345 malloc_leak (const malloc_state_machine &sm, tree arg)
1346 : malloc_diagnostic (sm, arg) {}
1347
1348 const char *get_kind () const final override { return "malloc_leak"; }
1349
1350 int get_controlling_option () const final override
1351 {
1352 return OPT_Wanalyzer_malloc_leak;
1353 }
1354
1355 bool emit (rich_location *rich_loc) final override
1356 {
1357 /* "CWE-401: Missing Release of Memory after Effective Lifetime". */
1358 diagnostic_metadata m;
1359 m.add_cwe (401);
1360 if (m_arg)
1361 return warning_meta (rich_loc, m, get_controlling_option (),
1362 "leak of %qE", m_arg);
1363 else
1364 return warning_meta (rich_loc, m, get_controlling_option (),
1365 "leak of %qs", "<unknown>");
1366 }
1367
1368 label_text describe_state_change (const evdesc::state_change &change)
1369 final override
1370 {
1371 if (unchecked_p (change.m_new_state)
1372 || (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
1373 {
1374 m_alloc_event = change.m_event_id;
1375 return label_text::borrow ("allocated here");
1376 }
1377 return malloc_diagnostic::describe_state_change (change);
1378 }
1379
1380 label_text describe_final_event (const evdesc::final_event &ev) final override
1381 {
1382 if (ev.m_expr)
1383 {
1384 if (m_alloc_event.known_p ())
1385 return ev.formatted_print ("%qE leaks here; was allocated at %@",
1386 ev.m_expr, &m_alloc_event);
1387 else
1388 return ev.formatted_print ("%qE leaks here", ev.m_expr);
1389 }
1390 else
1391 {
1392 if (m_alloc_event.known_p ())
1393 return ev.formatted_print ("%qs leaks here; was allocated at %@",
1394 "<unknown>", &m_alloc_event);
1395 else
1396 return ev.formatted_print ("%qs leaks here", "<unknown>");
1397 }
1398 }
1399
1400 private:
1401 diagnostic_event_id_t m_alloc_event;
1402 };
1403
1404 class free_of_non_heap : public malloc_diagnostic
1405 {
1406 public:
1407 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1408 const region *freed_reg,
1409 const char *funcname)
1410 : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
1411 {
1412 }
1413
1414 const char *get_kind () const final override { return "free_of_non_heap"; }
1415
1416 bool subclass_equal_p (const pending_diagnostic &base_other) const
1417 final override
1418 {
1419 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1420 return (same_tree_p (m_arg, other.m_arg)
1421 && m_freed_reg == other.m_freed_reg);
1422 }
1423
1424 int get_controlling_option () const final override
1425 {
1426 return OPT_Wanalyzer_free_of_non_heap;
1427 }
1428
1429 bool emit (rich_location *rich_loc) final override
1430 {
1431 auto_diagnostic_group d;
1432 diagnostic_metadata m;
1433 m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1434 switch (get_memory_space ())
1435 {
1436 default:
1437 case MEMSPACE_HEAP:
1438 gcc_unreachable ();
1439 case MEMSPACE_UNKNOWN:
1440 case MEMSPACE_CODE:
1441 case MEMSPACE_GLOBALS:
1442 case MEMSPACE_READONLY_DATA:
1443 return warning_meta (rich_loc, m, get_controlling_option (),
1444 "%<%s%> of %qE which points to memory"
1445 " not on the heap",
1446 m_funcname, m_arg);
1447 break;
1448 case MEMSPACE_STACK:
1449 return warning_meta (rich_loc, m, get_controlling_option (),
1450 "%<%s%> of %qE which points to memory"
1451 " on the stack",
1452 m_funcname, m_arg);
1453 break;
1454 }
1455 }
1456
1457 label_text describe_state_change (const evdesc::state_change &)
1458 final override
1459 {
1460 return label_text::borrow ("pointer is from here");
1461 }
1462
1463 label_text describe_final_event (const evdesc::final_event &ev) final override
1464 {
1465 return ev.formatted_print ("call to %qs here", m_funcname);
1466 }
1467
1468 void mark_interesting_stuff (interesting_t *interest) final override
1469 {
1470 if (m_freed_reg)
1471 interest->add_region_creation (m_freed_reg);
1472 }
1473
1474 private:
1475 enum memory_space get_memory_space () const
1476 {
1477 if (m_freed_reg)
1478 return m_freed_reg->get_memory_space ();
1479 else
1480 return MEMSPACE_UNKNOWN;
1481 }
1482
1483 const region *m_freed_reg;
1484 const char *m_funcname;
1485 };
1486
1487 /* Concrete pending_diagnostic subclass for -Wanalyzer-deref-before-check. */
1488
1489 class deref_before_check : public malloc_diagnostic
1490 {
1491 public:
1492 deref_before_check (const malloc_state_machine &sm, tree arg)
1493 : malloc_diagnostic (sm, arg)
1494 {}
1495
1496 const char *get_kind () const final override { return "deref_before_check"; }
1497
1498 int get_controlling_option () const final override
1499 {
1500 return OPT_Wanalyzer_deref_before_check;
1501 }
1502
1503 bool emit (rich_location *rich_loc) final override
1504 {
1505 if (m_arg)
1506 return warning_at (rich_loc, get_controlling_option (),
1507 "check of %qE for NULL after already"
1508 " dereferencing it",
1509 m_arg);
1510 else
1511 return warning_at (rich_loc, get_controlling_option (),
1512 "check of pointer for NULL after already"
1513 " dereferencing it");
1514 }
1515
1516 label_text describe_state_change (const evdesc::state_change &change)
1517 final override
1518 {
1519 if (change.m_old_state == m_sm.get_start_state ()
1520 && assumed_non_null_p (change.m_new_state))
1521 {
1522 m_first_deref_event = change.m_event_id;
1523 if (m_arg)
1524 return change.formatted_print ("pointer %qE is dereferenced here",
1525 m_arg);
1526 else
1527 return label_text::borrow ("pointer is dereferenced here");
1528 }
1529 return malloc_diagnostic::describe_state_change (change);
1530 }
1531
1532 label_text describe_final_event (const evdesc::final_event &ev) final override
1533 {
1534 if (m_first_deref_event.known_p ())
1535 {
1536 if (m_arg)
1537 return ev.formatted_print ("pointer %qE is checked for NULL here but"
1538 " it was already dereferenced at %@",
1539 m_arg, &m_first_deref_event);
1540 else
1541 return ev.formatted_print ("pointer is checked for NULL here but"
1542 " it was already dereferenced at %@",
1543 &m_first_deref_event);
1544 }
1545 else
1546 {
1547 if (m_arg)
1548 return ev.formatted_print ("pointer %qE is checked for NULL here but"
1549 " it was already dereferenced",
1550 m_arg);
1551 else
1552 return ev.formatted_print ("pointer is checked for NULL here but"
1553 " it was already dereferenced");
1554 }
1555 }
1556
1557 private:
1558 diagnostic_event_id_t m_first_deref_event;
1559 };
1560
1561 /* struct allocation_state : public state_machine::state. */
1562
1563 /* Implementation of state_machine::state::dump_to_pp vfunc
1564 for allocation_state: append the API that this allocation is
1565 associated with. */
1566
1567 void
1568 allocation_state::dump_to_pp (pretty_printer *pp) const
1569 {
1570 state_machine::state::dump_to_pp (pp);
1571 if (m_deallocators)
1572 {
1573 pp_string (pp, " (");
1574 m_deallocators->dump_to_pp (pp);
1575 pp_character (pp, ')');
1576 }
1577 }
1578
1579 /* Given an allocation_state for a deallocator_set, get the "nonnull" state
1580 for the corresponding allocator(s). */
1581
1582 const allocation_state *
1583 allocation_state::get_nonnull () const
1584 {
1585 gcc_assert (m_deallocators);
1586 return as_a_allocation_state (m_deallocators->m_nonnull);
1587 }
1588
1589 /* struct assumed_non_null_state : public allocation_state. */
1590
1591 void
1592 assumed_non_null_state::dump_to_pp (pretty_printer *pp) const
1593 {
1594 allocation_state::dump_to_pp (pp);
1595 pp_string (pp, " (in ");
1596 m_frame->dump_to_pp (pp, true);
1597 pp_character (pp, ')');
1598 }
1599
1600 /* malloc_state_machine's ctor. */
1601
1602 malloc_state_machine::malloc_state_machine (logger *logger)
1603 : state_machine ("malloc", logger),
1604 m_free (this, "free", WORDING_FREED),
1605 m_scalar_delete (this, "delete", WORDING_DELETED),
1606 m_vector_delete (this, "delete[]", WORDING_DELETED),
1607 m_realloc (this, "realloc", WORDING_REALLOCATED)
1608 {
1609 gcc_assert (m_start->get_id () == 0);
1610 m_null = add_state ("null", RS_FREED, NULL, NULL);
1611 m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
1612 m_stop = add_state ("stop", RS_STOP, NULL, NULL);
1613 }
1614
1615 malloc_state_machine::~malloc_state_machine ()
1616 {
1617 unsigned i;
1618 custom_deallocator_set *set;
1619 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1620 delete set;
1621 custom_deallocator *d;
1622 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1623 delete d;
1624 }
1625
1626 state_machine::state_t
1627 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1628 const deallocator_set *deallocators,
1629 const deallocator *deallocator)
1630 {
1631 return add_custom_state (new allocation_state (name, alloc_state_id (),
1632 rs, deallocators,
1633 deallocator));
1634 }
1635
1636 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1637 return a custom_deallocator_set for them, consolidating them
1638 to ensure uniqueness of the sets.
1639
1640 Return NULL if it has no such attributes. */
1641
1642 const custom_deallocator_set *
1643 malloc_state_machine::
1644 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1645 {
1646 /* Early rejection of decls without attributes. */
1647 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1648 if (!attrs)
1649 return NULL;
1650
1651 /* Otherwise, call maybe_create_custom_deallocator_set,
1652 memoizing the result. */
1653 if (custom_deallocator_set **slot
1654 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1655 return *slot;
1656 custom_deallocator_set *set
1657 = maybe_create_custom_deallocator_set (allocator_fndecl);
1658 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1659 return set;
1660 }
1661
1662 /* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1663 look for any "__attribute__((malloc(FOO)))" and return a
1664 custom_deallocator_set for them, consolidating them
1665 to ensure uniqueness of the sets.
1666
1667 Return NULL if it has no such attributes.
1668
1669 Subroutine of get_or_create_custom_deallocator_set which
1670 memoizes the result. */
1671
1672 custom_deallocator_set *
1673 malloc_state_machine::
1674 maybe_create_custom_deallocator_set (tree allocator_fndecl)
1675 {
1676 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1677 gcc_assert (attrs);
1678
1679 /* Look for instances of __attribute__((malloc(FOO))). */
1680 auto_vec<const deallocator *> deallocator_vec;
1681 for (tree allocs = attrs;
1682 (allocs = lookup_attribute ("malloc", allocs));
1683 allocs = TREE_CHAIN (allocs))
1684 {
1685 tree args = TREE_VALUE (allocs);
1686 if (!args)
1687 continue;
1688 if (TREE_VALUE (args))
1689 {
1690 const deallocator *d
1691 = get_or_create_deallocator (TREE_VALUE (args));
1692 deallocator_vec.safe_push (d);
1693 }
1694 }
1695
1696 /* If there weren't any deallocators, bail. */
1697 if (deallocator_vec.length () == 0)
1698 return NULL;
1699
1700 /* Consolidate, so that we reuse existing deallocator_set
1701 instances. */
1702 deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
1703 custom_deallocator_set **slot
1704 = m_custom_deallocator_set_map.get (&deallocator_vec);
1705 if (slot)
1706 return *slot;
1707 custom_deallocator_set *set
1708 = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
1709 m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
1710 m_dynamic_sets.safe_push (set);
1711 return set;
1712 }
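/* For example (a hypothetical pairing, mirroring the fopen example at the
   top of this file): if two allocators are each declared with
     __attribute__ ((malloc (fclose)))
     __attribute__ ((malloc (freopen, 3)))
   they end up sharing a single custom_deallocator_set for
   {"fclose", "freopen"}, since their sorted deallocator vectors compare
   equal in m_custom_deallocator_set_map.  */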
1713
1714 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1715
1716 const deallocator *
1717 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1718 {
1719 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1720 if (slot)
1721 return *slot;
1722
1723 /* Reuse "free". */
1724 deallocator *d;
1725 if (is_named_call_p (deallocator_fndecl, "free")
1726 || is_std_named_call_p (deallocator_fndecl, "free")
1727 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1728 d = &m_free.m_deallocator;
1729 else
1730 {
1731 custom_deallocator *cd
1732 = new custom_deallocator (this, deallocator_fndecl,
1733 WORDING_DEALLOCATED);
1734 m_dynamic_deallocators.safe_push (cd);
1735 d = cd;
1736 }
1737 m_deallocator_map.put (deallocator_fndecl, d);
1738 return d;
1739 }
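/* For instance: a deallocator attribute naming "free" (or "__builtin_free")
   reuses the built-in m_free.m_deallocator rather than creating a new
   custom_deallocator, so all such declarations share one "freed" state.  */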
1740
1741 /* Get the "assumed-non-null" state for assumptions made within FRAME,
1742 creating it if necessary. */
1743
1744 state_machine::state_t
1745 malloc_state_machine::
1746 get_or_create_assumed_non_null_state_for_frame (const frame_region *frame)
1747 {
1748 if (state_t *slot = m_assumed_non_null.get (frame))
1749 return *slot;
1750 state_machine::state *new_state
1751 = new assumed_non_null_state ("assumed-non-null", alloc_state_id (), frame);
1752 add_custom_state (new_state);
1753 m_assumed_non_null.put (frame, new_state);
1754 return new_state;
1755 }
1756
1757 /* Try to identify the function declaration either by name or as a known malloc
1758 builtin. */
1759
1760 static bool
1761 known_allocator_p (const_tree fndecl, const gcall *call)
1762 {
1763 /* Either it is a function we know by name and number of arguments... */
1764 if (is_named_call_p (fndecl, "malloc", call, 1)
1765 || is_named_call_p (fndecl, "calloc", call, 2)
1766 || is_std_named_call_p (fndecl, "malloc", call, 1)
1767 || is_std_named_call_p (fndecl, "calloc", call, 2)
1768 || is_named_call_p (fndecl, "strdup", call, 1)
1769 || is_named_call_p (fndecl, "strndup", call, 2))
1770 return true;
1771
1772 /* ... or it is a builtin allocator that allocates objects freed with
1773 __builtin_free. */
1774 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1775 switch (DECL_FUNCTION_CODE (fndecl))
1776 {
1777 case BUILT_IN_MALLOC:
1778 case BUILT_IN_CALLOC:
1779 case BUILT_IN_STRDUP:
1780 case BUILT_IN_STRNDUP:
1781 return true;
1782 default:
1783 break;
1784 }
1785
1786 return false;
1787 }
1788
1789 /* If PTR's nullness is not known, transition it to the "assumed-non-null"
1790 state for the current frame. */
1791
1792 void
1793 malloc_state_machine::maybe_assume_non_null (sm_context *sm_ctxt,
1794 tree ptr,
1795 const gimple *stmt) const
1796 {
1797 const region_model *old_model = sm_ctxt->get_old_region_model ();
1798 if (!old_model)
1799 return;
1800
1801 tree null_ptr_cst = build_int_cst (TREE_TYPE (ptr), 0);
1802 tristate known_non_null
1803 = old_model->eval_condition (ptr, NE_EXPR, null_ptr_cst, NULL);
1804 if (known_non_null.is_unknown ())
1805 {
1806 /* Cast away const-ness for cache-like operations. */
1807 malloc_state_machine *mut_this
1808 = const_cast <malloc_state_machine *> (this);
1809 state_t next_state
1810 = mut_this->get_or_create_assumed_non_null_state_for_frame
1811 (old_model->get_current_frame ());
1812 sm_ctxt->set_next_state (stmt, ptr, next_state);
1813 }
1814 }
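/* A sketch of how this is used: when a pointer whose nullness is unknown
   is unconditionally dereferenced (or, as in the nonnull-argument handling
   in on_stmt below, passed where a non-null value is expected), it enters
   the per-frame "assumed-non-null" state; an explicit NULL check of it
   later in that frame can then be reported by the deref_before_check
   diagnostic above.  */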
1815
1816 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1817
1818 bool
1819 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1820 const supernode *node,
1821 const gimple *stmt) const
1822 {
1823 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1824 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1825 {
1826 if (known_allocator_p (callee_fndecl, call))
1827 {
1828 on_allocator_call (sm_ctxt, call, &m_free);
1829 return true;
1830 }
1831
1832 if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1833 on_allocator_call (sm_ctxt, call, &m_scalar_delete);
1834 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1835 on_allocator_call (sm_ctxt, call, &m_vector_delete);
1836 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1837 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1838 {
1839 on_deallocator_call (sm_ctxt, node, call,
1840 &m_scalar_delete.m_deallocator, 0);
1841 return true;
1842 }
1843 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1844 {
1845 on_deallocator_call (sm_ctxt, node, call,
1846 &m_vector_delete.m_deallocator, 0);
1847 return true;
1848 }
1849
1850 if (is_named_call_p (callee_fndecl, "alloca", call, 1)
1851 || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
1852 {
1853 tree lhs = gimple_call_lhs (call);
1854 if (lhs)
1855 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1856 return true;
1857 }
1858
1859 if (is_named_call_p (callee_fndecl, "free", call, 1)
1860 || is_std_named_call_p (callee_fndecl, "free", call, 1)
1861 || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
1862 {
1863 on_deallocator_call (sm_ctxt, node, call,
1864 &m_free.m_deallocator, 0);
1865 return true;
1866 }
1867
1868 if (is_named_call_p (callee_fndecl, "realloc", call, 2)
1869 || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
1870 {
1871 on_realloc_call (sm_ctxt, node, call);
1872 return true;
1873 }
1874
1875 if (unaffected_by_call_p (callee_fndecl))
1876 return true;
1877
1878 /* Cast away const-ness for cache-like operations. */
1879 malloc_state_machine *mutable_this
1880 = const_cast <malloc_state_machine *> (this);
1881
1882 /* Handle "__attribute__((malloc(FOO)))". */
1883 if (const deallocator_set *deallocators
1884 = mutable_this->get_or_create_custom_deallocator_set
1885 (callee_fndecl))
1886 {
1887 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
1888 bool returns_nonnull
1889 = lookup_attribute ("returns_nonnull", attrs);
1890 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
1891 }
1892
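/* Illustrative sketch of the "__attribute__((malloc(FOO)))" handling
   above, using a hypothetical allocator/deallocator pair:

     #include <stdlib.h>

     struct conn;
     void conn_close (struct conn *);
     __attribute__ ((malloc (conn_close)))
     struct conn *conn_open (const char *host);

     void drop (const char *host)
     {
       struct conn *c = conn_open (host);
       if (!c)
         return;
       free (c);      // mismatching_deallocation: conn_close expected
     }
*/
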
1893 /* Handle "__attribute__((nonnull))". */
1894 {
1895 tree fntype = TREE_TYPE (callee_fndecl);
1896 bitmap nonnull_args = get_nonnull_args (fntype);
1897 if (nonnull_args)
1898 {
1899 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1900 {
1901 tree arg = gimple_call_arg (stmt, i);
1902 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
1903 continue;
1904 /* An empty nonnull_args bitmap means that all pointer arguments
1905 are nonnull; otherwise only the specified arguments are. */
1906 if (bitmap_empty_p (nonnull_args)
1907 || bitmap_bit_p (nonnull_args, i))
1908 {
1909 state_t state = sm_ctxt->get_state (stmt, arg);
1910 /* Can't use a switch as the states are non-const. */
1911 if (unchecked_p (state))
1912 {
1913 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1914 sm_ctxt->warn (node, stmt, arg,
1915 make_unique<possible_null_arg>
1916 (*this, diag_arg, callee_fndecl, i));
1917 const allocation_state *astate
1918 = as_a_allocation_state (state);
1919 sm_ctxt->set_next_state (stmt, arg,
1920 astate->get_nonnull ());
1921 }
1922 else if (state == m_null)
1923 {
1924 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1925 sm_ctxt->warn (node, stmt, arg,
1926 make_unique<null_arg>
1927 (*this, diag_arg, callee_fndecl, i));
1928 sm_ctxt->set_next_state (stmt, arg, m_stop);
1929 }
1930 else if (state == m_start)
1931 maybe_assume_non_null (sm_ctxt, arg, stmt);
1932 }
1933 }
1934 BITMAP_FREE (nonnull_args);
1935 }
1936 }
1937
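/* Illustrative sketch of the "__attribute__((nonnull))" handling above:

     #include <stdlib.h>

     extern void consume (void *buf) __attribute__ ((nonnull));

     void pass_unchecked (size_t n)
     {
       void *p = malloc (n);
       consume (p);    // P may be NULL: possible_null_arg;
                       // afterwards P is treated as nonnull
       free (p);
     }

   Passing a pointer known to be NULL is reported via null_arg instead,
   and the pointer transitions to the "stop" state.  */
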
1938 /* Check for this after nonnull, so that if we have both
1939 then we transition to "freed", rather than "checked". */
1940 unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
1941 if (dealloc_argno != UINT_MAX)
1942 {
1943 const deallocator *d
1944 = mutable_this->get_or_create_deallocator (callee_fndecl);
1945 on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
1946 }
1947 }
1948
1949 /* Look for pointers explicitly being compared against zero
1950 that are in state assumed_non_null, i.e. pointers we have already
1951 dereferenced.
1952 We have to do this check here, rather than in on_condition,
1953 because we add a constraint that the pointer is non-null when
1954 dereferencing it, and this makes apply_constraints_for_gcond
1955 find known-true and known-false conditions; on_condition is only
1956 called when adding new constraints. */
1957 if (const gcond *cond_stmt = dyn_cast <const gcond *> (stmt))
1958 {
1959 enum tree_code op = gimple_cond_code (cond_stmt);
1960 if (op == EQ_EXPR || op == NE_EXPR)
1961 {
1962 tree lhs = gimple_cond_lhs (cond_stmt);
1963 tree rhs = gimple_cond_rhs (cond_stmt);
1964 if (any_pointer_p (lhs)
1965 && any_pointer_p (rhs)
1966 && zerop (rhs))
1967 {
1968 state_t state = sm_ctxt->get_state (stmt, lhs);
1969 if (assumed_non_null_p (state))
1970 maybe_complain_about_deref_before_check
1971 (sm_ctxt, node,
1972 stmt,
1973 (const assumed_non_null_state *)state,
1974 lhs);
1975 }
1976 }
1977 }
1978
1979 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
1980 if (any_pointer_p (lhs))
1981 on_zero_assignment (sm_ctxt, stmt, lhs);
1982
1983 /* Handle dereferences. */
1984 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
1985 {
1986 tree op = gimple_op (stmt, i);
1987 if (!op)
1988 continue;
1989 if (TREE_CODE (op) == COMPONENT_REF)
1990 op = TREE_OPERAND (op, 0);
1991
1992 if (TREE_CODE (op) == MEM_REF)
1993 {
1994 tree arg = TREE_OPERAND (op, 0);
1995
1996 state_t state = sm_ctxt->get_state (stmt, arg);
1997 if (state == m_start)
1998 maybe_assume_non_null (sm_ctxt, arg, stmt);
1999 else if (unchecked_p (state))
2000 {
2001 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2002 sm_ctxt->warn (node, stmt, arg,
2003 make_unique<possible_null_deref> (*this,
2004 diag_arg));
2005 const allocation_state *astate = as_a_allocation_state (state);
2006 sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
2007 }
2008 else if (state == m_null)
2009 {
2010 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2011 sm_ctxt->warn (node, stmt, arg,
2012 make_unique<null_deref> (*this, diag_arg));
2013 sm_ctxt->set_next_state (stmt, arg, m_stop);
2014 }
2015 else if (freed_p (state))
2016 {
2017 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2018 const allocation_state *astate = as_a_allocation_state (state);
2019 sm_ctxt->warn (node, stmt, arg,
2020 make_unique<use_after_free>
2021 (*this, diag_arg, astate->m_deallocator));
2022 sm_ctxt->set_next_state (stmt, arg, m_stop);
2023 }
2024 }
2025 }
2026 return false;
2027 }
2028
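/* Illustrative sketch of the dereference handling in on_stmt above:

     #include <stdlib.h>

     int read_after_free (size_t n)
     {
       int *p = malloc (n * sizeof (int));
       int v = p[0];     // P unchecked: possible_null_deref;
                         // afterwards P is treated as nonnull
       free (p);         // P: "freed"
       return v + p[0];  // use_after_free
     }
*/
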
2029 /* Given a check against null of PTR in assumed-non-null state STATE,
2030 potentially add a deref_before_check warning to SM_CTXT. */
2031
2032 void
2033 malloc_state_machine::
2034 maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
2035 const supernode *node,
2036 const gimple *stmt,
2037 const assumed_non_null_state *state,
2038 tree ptr) const
2039 {
2040 const region_model *model = sm_ctxt->get_old_region_model ();
2041 if (!model)
2042 return;
2043
2044 /* Don't complain if the current frame (where the check is occurring) is
2045 deeper than the frame in which the "not null" assumption was made.
2046 This suppresses false positives for cases like:
2047
2048 void foo (struct s *p)
2049 {
2050 int val = p->some_field; // deref here
2051 shared_helper (p);
2052 }
2053
2054 where "shared_helper" has:
2055
2056 void shared_helper (struct s *p)
2057 {
2058 if (!p) // check here
2059 return;
2060 // etc
2061 }
2062
2063 since the check in "shared_helper" is OK. */
2064 const frame_region *checked_in_frame = model->get_current_frame ();
2065 const frame_region *assumed_nonnull_in_frame = state->m_frame;
2066 if (checked_in_frame->get_index () > assumed_nonnull_in_frame->get_index ())
2067 return;
2068
2069 tree diag_ptr = sm_ctxt->get_diagnostic_tree (ptr);
2070 sm_ctxt->warn
2071 (node, stmt, ptr,
2072 make_unique<deref_before_check> (*this, diag_ptr));
2073 sm_ctxt->set_next_state (stmt, ptr, m_stop);
2074 }
2075
2076 /* Handle a call to an allocator.
2077 RETURNS_NONNULL is true if CALL is to a fndecl known to have
2078 __attribute__((returns_nonnull)). */
2079
2080 void
2081 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
2082 const gcall *call,
2083 const deallocator_set *deallocators,
2084 bool returns_nonnull) const
2085 {
2086 tree lhs = gimple_call_lhs (call);
2087 if (lhs)
2088 {
2089 if (sm_ctxt->get_state (call, lhs) == m_start)
2090 sm_ctxt->set_next_state (call, lhs,
2091 (returns_nonnull
2092 ? deallocators->m_nonnull
2093 : deallocators->m_unchecked));
2094 }
2095 else
2096 {
2097 /* TODO: report leak. */
2098 }
2099 }
2100
2101 /* Handle deallocations of non-heap pointers.
2102 non-heap -> stop, with warning. */
2103
2104 void
2105 malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
2106 const supernode *node,
2107 const gcall *call,
2108 tree arg,
2109 const deallocator *d) const
2110 {
2111 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2112 const region *freed_reg = NULL;
2113 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
2114 {
2115 const region_model *old_model = old_state->m_region_model;
2116 const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
2117 freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
2118 }
2119 sm_ctxt->warn (node, call, arg,
2120 make_unique<free_of_non_heap>
2121 (*this, diag_arg, freed_reg, d->m_name));
2122 sm_ctxt->set_next_state (call, arg, m_stop);
2123 }
2124
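/* Illustrative sketch for handle_free_of_non_heap above:

     #include <stdlib.h>

     void free_stack_buf (void)
     {
       int buf[16];
       free (buf);     // free_of_non_heap: BUF is on the stack
     }

   The same applies to alloca results, which on_stmt moves to the
   "non-heap" state.  */
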
2125 void
2126 malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
2127 const supernode *node,
2128 const gcall *call,
2129 const deallocator *d,
2130 unsigned argno) const
2131 {
2132 if (argno >= gimple_call_num_args (call))
2133 return;
2134 tree arg = gimple_call_arg (call, argno);
2135
2136 state_t state = sm_ctxt->get_state (call, arg);
2137
2138 /* start/assumed_non_null/unchecked/nonnull -> freed. */
2139 if (state == m_start || assumed_non_null_p (state))
2140 sm_ctxt->set_next_state (call, arg, d->m_freed);
2141 else if (unchecked_p (state) || nonnull_p (state))
2142 {
2143 const allocation_state *astate = as_a_allocation_state (state);
2144 gcc_assert (astate->m_deallocators);
2145 if (!astate->m_deallocators->contains_p (d))
2146 {
2147 /* Wrong allocator. */
2148 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2149 sm_ctxt->warn (node, call, arg,
2150 make_unique<mismatching_deallocation>
2151 (*this, diag_arg,
2152 astate->m_deallocators,
2153 d));
2154 }
2155 sm_ctxt->set_next_state (call, arg, d->m_freed);
2156 }
2157
2158 /* Keep state "null" as-is, rather than transitioning to "freed";
2159 we don't want to complain about double-free of NULL. */
2160 else if (state == d->m_freed)
2161 {
2162 /* freed -> stop, with warning. */
2163 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2164 sm_ctxt->warn (node, call, arg,
2165 make_unique<double_free> (*this, diag_arg, d->m_name));
2166 sm_ctxt->set_next_state (call, arg, m_stop);
2167 }
2168 else if (state == m_non_heap)
2169 {
2170 /* non-heap -> stop, with warning. */
2171 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
2172 }
2173 }
2174
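/* Illustrative sketch of the deallocator handling above:

     #include <stdlib.h>

     void release_twice (void *p)
     {
       free (p);     // P transitions to the "freed" state
       free (p);     // freed -> stop: double_free
     }
*/
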
2175 /* Handle a call to "realloc".
2176 Check for free of non-heap or mismatching allocators,
2177 transitioning to the "stop" state for such cases.
2178
2179 Otherwise, kf_realloc::impl_call_post will later
2180 get called (which will handle other sm-state transitions
2181 when the state is bifurcated). */
2182
2183 void
2184 malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
2185 const supernode *node,
2186 const gcall *call) const
2187 {
2188 const unsigned argno = 0;
2189 const deallocator *d = &m_realloc;
2190
2191 tree arg = gimple_call_arg (call, argno);
2192
2193 state_t state = sm_ctxt->get_state (call, arg);
2194
2195 if (unchecked_p (state) || nonnull_p (state))
2196 {
2197 const allocation_state *astate = as_a_allocation_state (state);
2198 gcc_assert (astate->m_deallocators);
2199 if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
2200 {
2201 /* Wrong allocator. */
2202 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2203 sm_ctxt->warn (node, call, arg,
2204 make_unique<mismatching_deallocation>
2205 (*this, diag_arg,
2206 astate->m_deallocators, d));
2207 sm_ctxt->set_next_state (call, arg, m_stop);
2208 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2209 path_ctxt->terminate_path ();
2210 }
2211 }
2212 else if (state == m_free.m_deallocator.m_freed)
2213 {
2214 /* freed -> stop, with warning. */
2215 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2216 sm_ctxt->warn (node, call, arg,
2217 make_unique<double_free> (*this, diag_arg, "free"));
2218 sm_ctxt->set_next_state (call, arg, m_stop);
2219 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2220 path_ctxt->terminate_path ();
2221 }
2222 else if (state == m_non_heap)
2223 {
2224 /* non-heap -> stop, with warning. */
2225 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
2226 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2227 path_ctxt->terminate_path ();
2228 }
2229 }
2230
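/* Illustrative sketch of the realloc checks above:

     #include <stdlib.h>

     void *grow (void *p, size_t n)
     {
       free (p);
       return realloc (p, n);   // double_free ("free"): P already freed
     }

   Similarly, reallocating a pointer whose expected deallocator is not
   "free" is reported via mismatching_deallocation; in both cases the
   path is terminated.  */
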
2231 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
2232
2233 void
2234 malloc_state_machine::on_phi (sm_context *sm_ctxt,
2235 const supernode *node ATTRIBUTE_UNUSED,
2236 const gphi *phi,
2237 tree rhs) const
2238 {
2239 if (zerop (rhs))
2240 {
2241 tree lhs = gimple_phi_result (phi);
2242 on_zero_assignment (sm_ctxt, phi, lhs);
2243 }
2244 }
2245
2246 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
2247 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
2248
2249 void
2250 malloc_state_machine::on_condition (sm_context *sm_ctxt,
2251 const supernode *node ATTRIBUTE_UNUSED,
2252 const gimple *stmt,
2253 const svalue *lhs,
2254 enum tree_code op,
2255 const svalue *rhs) const
2256 {
2257 if (!rhs->all_zeroes_p ())
2258 return;
2259
2260 if (!any_pointer_p (lhs))
2261 return;
2262 if (!any_pointer_p (rhs))
2263 return;
2264
2265 if (op == NE_EXPR)
2266 {
2267 log ("got 'ARG != 0' match");
2268 state_t s = sm_ctxt->get_state (stmt, lhs);
2269 if (unchecked_p (s))
2270 {
2271 const allocation_state *astate = as_a_allocation_state (s);
2272 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
2273 }
2274 }
2275 else if (op == EQ_EXPR)
2276 {
2277 log ("got 'ARG == 0' match");
2278 state_t s = sm_ctxt->get_state (stmt, lhs);
2279 if (unchecked_p (s))
2280 sm_ctxt->set_next_state (stmt, lhs, m_null);
2281 }
2282 }
2283
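/* Illustrative sketch of the transitions above:

     #include <stdlib.h>

     void maybe_use (size_t n)
     {
       char *p = malloc (n);   // P: "unchecked"
       if (p)                  // 'ARG != 0': P becomes "nonnull"
         p[0] = 0;
       else                    // 'ARG == 0': P becomes "null"
         return;
       free (p);
     }
*/
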
2284 /* Implementation of state_machine::on_pop_frame vfunc for malloc_state_machine.
2285 Clear any "assumed-non-null" state where the assumption happened in
2286 FRAME_REG. */
2287
2288 void
2289 malloc_state_machine::on_pop_frame (sm_state_map *smap,
2290 const frame_region *frame_reg) const
2291 {
2292 hash_set<const svalue *> svals_to_clear;
2293 for (auto kv : *smap)
2294 {
2295 const svalue *sval = kv.first;
2296 state_t state = kv.second.m_state;
2297 if (assumed_non_null_p (state))
2298 {
2299 const assumed_non_null_state *assumed_state
2300 = (const assumed_non_null_state *)state;
2301 if (frame_reg == assumed_state->m_frame)
2302 svals_to_clear.add (sval);
2303 }
2304 }
2305 for (auto sval : svals_to_clear)
2306 smap->clear_any_state (sval);
2307 }
2308
2309 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
2310 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
2311 (to avoid false leak reports). */
2312
2313 bool
2314 malloc_state_machine::can_purge_p (state_t s) const
2315 {
2316 enum resource_state rs = get_rs (s);
2317 return rs != RS_UNCHECKED && rs != RS_NONNULL;
2318 }
2319
2320 /* Implementation of state_machine::on_leak vfunc for malloc_state_machine
2321 (for complaining about leaks of pointers in state 'unchecked' and
2322 'nonnull'). */
2323
2324 std::unique_ptr<pending_diagnostic>
2325 malloc_state_machine::on_leak (tree var) const
2326 {
2327 return make_unique<malloc_leak> (*this, var);
2328 }
2329
2330 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
2331 for malloc_state_machine. */
2332
2333 bool
2334 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
2335 bool is_mutable) const
2336 {
2337 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
2338 unknown fn. */
2339 if (s == m_non_heap)
2340 return false;
2341
2342 /* Otherwise, pointers passed as non-const can be freed. */
2343 return is_mutable;
2344 }
2345
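/* Illustrative sketch of the above, using a hypothetical external
   function that is not modelled:

     #include <stdlib.h>

     extern void register_buffer (void *);

     void hand_off (size_t n)
     {
       void *p = malloc (n);
       register_buffer (p);   // the unknown fn may free or store P, so
                              // P's sm-state is reset: no leak or
                              // double-free is reported afterwards
     }

   An alloca result, by contrast, stays "non-heap" across such a call.  */
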
2346 /* Implementation of state_machine::maybe_get_merged_states_nonequal vfunc
2347 for malloc_state_machine.
2348
2349 Support discarding "assumed-non-null" states when merging with
2350 start state. */
2351
2352 state_machine::state_t
2353 malloc_state_machine::maybe_get_merged_states_nonequal (state_t state_a,
2354 state_t state_b) const
2355 {
2356 if (assumed_non_null_p (state_a) && state_b == m_start)
2357 return m_start;
2358 if (state_a == m_start && assumed_non_null_p (state_b))
2359 return m_start;
2360 return NULL;
2361 }
2362
2363 /* Return true if calls to FNDECL are known to not affect this sm-state. */
2364
2365 bool
2366 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2367 {
2368 /* A set of functions that are known to not affect allocation
2369 status, even if we haven't fully modelled the rest of their
2370 behavior yet. */
2371 static const char * const funcnames[] = {
2372 /* This array must be kept sorted. */
2373 "strsep",
2374 };
2375 const size_t count = ARRAY_SIZE (funcnames);
2376 function_set fs (funcnames, count);
2377
2378 if (fs.contains_decl_p (fndecl))
2379 return true;
2380
2381 return false;
2382 }
2383
2384 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2385 assign zero to LHS. */
2386
2387 void
2388 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2389 const gimple *stmt,
2390 tree lhs) const
2391 {
2392 state_t s = sm_ctxt->get_state (stmt, lhs);
2393 enum resource_state rs = get_rs (s);
2394 if (rs == RS_START
2395 || rs == RS_UNCHECKED
2396 || rs == RS_NONNULL
2397 || rs == RS_FREED)
2398 sm_ctxt->set_next_state (stmt, lhs, m_null);
2399 }
2400
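/* Illustrative sketch of the zero-assignment handling above:

     #include <stdlib.h>

     void release_and_clear (void *p)
     {
       free (p);      // P: "freed"
       p = NULL;      // P: "null"
       free (p);      // not reported: "null" is kept as-is on
                      // deallocation, so no double_free for NULL
     }
*/
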
2401 /* Special-case hook for handling realloc, for the "success with move to
2402 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2403 non-null.
2404
2405 This is similar to on_deallocator_call and on_allocator_call,
2406 but the checks happen in on_realloc_call, and by splitting the states. */
2407
2408 void
2409 malloc_state_machine::
2410 on_realloc_with_move (region_model *model,
2411 sm_state_map *smap,
2412 const svalue *old_ptr_sval,
2413 const svalue *new_ptr_sval,
2414 const extrinsic_state &ext_state) const
2415 {
2416 smap->set_state (model, old_ptr_sval,
2417 m_free.m_deallocator.m_freed,
2418 NULL, ext_state);
2419
2420 smap->set_state (model, new_ptr_sval,
2421 m_free.m_nonnull,
2422 NULL, ext_state);
2423 }
2424
2425 } // anonymous namespace
2426
2427 /* Internal interface to this file. */
2428
2429 state_machine *
2430 make_malloc_state_machine (logger *logger)
2431 {
2432 return new malloc_state_machine (logger);
2433 }
2434
2435 /* Special-case hook for handling realloc, for use by
2436 kf_realloc::impl_call_post::success_with_move::update_model. */
2437
2438 void
2439 region_model::on_realloc_with_move (const call_details &cd,
2440 const svalue *old_ptr_sval,
2441 const svalue *new_ptr_sval)
2442 {
2443 region_model_context *ctxt = cd.get_ctxt ();
2444 if (!ctxt)
2445 return;
2446 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2447 if (!ext_state)
2448 return;
2449
2450 sm_state_map *smap;
2451 const state_machine *sm;
2452 unsigned sm_idx;
2453 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2454 return;
2455
2456 gcc_assert (smap);
2457 gcc_assert (sm);
2458
2459 const malloc_state_machine &malloc_sm
2460 = (const malloc_state_machine &)*sm;
2461
2462 malloc_sm.on_realloc_with_move (this,
2463 smap,
2464 old_ptr_sval,
2465 new_ptr_sval,
2466 *ext_state);
2467 }
2468
2469 } // namespace ana
2470
2471 #endif /* #if ENABLE_ANALYZER */