]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/analyzer/sm-malloc.cc
Update copyright years.
[thirdparty/gcc.git] / gcc / analyzer / sm-malloc.cc
1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2024 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "options.h"
31 #include "bitmap.h"
32 #include "diagnostic-path.h"
33 #include "analyzer/analyzer.h"
34 #include "diagnostic-event-id.h"
35 #include "analyzer/analyzer-logging.h"
36 #include "analyzer/sm.h"
37 #include "analyzer/pending-diagnostic.h"
38 #include "analyzer/call-string.h"
39 #include "analyzer/program-point.h"
40 #include "analyzer/store.h"
41 #include "analyzer/region-model.h"
42 #include "analyzer/call-details.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "analyzer/function-set.h"
46 #include "analyzer/program-state.h"
47 #include "analyzer/checker-event.h"
48 #include "analyzer/exploded-graph.h"
49
50 #if ENABLE_ANALYZER
51
52 namespace ana {
53
54 namespace {
55
56 /* This state machine and its various support classes track allocations
57 and deallocations.
58
59 It has a few standard allocation/deallocation pairs (e.g. new/delete),
60 and also supports user-defined ones via
61 __attribute__ ((malloc(DEALLOCATOR))).
62
63 There can be more than one valid deallocator for a given allocator,
64 for example:
65 __attribute__ ((malloc (fclose)))
66 __attribute__ ((malloc (freopen, 3)))
67 FILE* fopen (const char*, const char*);
68 A deallocator_set represents a particular set of valid deallocators.
69
70 We track the expected deallocator_set for a value, but not the allocation
71 function - there could be more than one allocator per deallocator_set.
72 For example, there could be dozens of allocators for "free" beyond just
73 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
74 of states by tracking individual allocators in the exploded graph;
75 we merely want to track "this value expects to have 'free' called on it".
76 Perhaps we can reconstruct which allocator was used later, when emitting
77 the path, if it's necessary for precision of wording of diagnostics. */
78
79 class deallocator;
80 class deallocator_set;
81 class malloc_state_machine;
82
/* An enum for discriminating between different kinds of allocation_state.
   States that are not allocation_states (i.e. the start state) are
   treated as RS_START by get_rs below.  */

enum resource_state
{
  /* States that are independent of allocator/deallocator.  */

  /* The start state.  */
  RS_START,

  /* State for a pointer that's been unconditionally dereferenced.  */
  RS_ASSUMED_NON_NULL,

  /* State for a pointer that's known to be NULL.  */
  RS_NULL,

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  RS_NON_HEAP,

  /* Stop state, for pointers we don't want to track any more.  */
  RS_STOP,

  /* States that relate to a specific deallocator_set.  */

  /* State for a pointer returned from an allocator that hasn't
     been checked for NULL.
     It could be a pointer to heap-allocated memory, or could be NULL.  */
  RS_UNCHECKED,

  /* State for a pointer returned from an allocator,
     known to be non-NULL.  */
  RS_NONNULL,

  /* State for a pointer passed to a deallocator.  */
  RS_FREED
};
119
/* Custom state subclass, which can optionally refer to a
   deallocator_set.  */

struct allocation_state : public state_machine::state
{
  allocation_state (const char *name, unsigned id,
		    enum resource_state rs,
		    const deallocator_set *deallocators,
		    const deallocator *deallocator)
  : state (name, id), m_rs (rs),
    m_deallocators (deallocators),
    m_deallocator (deallocator)
  {}

  void dump_to_pp (pretty_printer *pp) const override;

  const allocation_state *get_nonnull () const;

  /* Discriminator for the kind of state this is.  */
  enum resource_state m_rs;

  /* The set of valid deallocators for the tracked value; NULL for
     states that are independent of any deallocator_set.  */
  const deallocator_set *m_deallocators;

  /* The specific deallocator this state relates to (e.g. for RS_FREED,
     the deallocator the value was passed to); can be NULL.  */
  const deallocator *m_deallocator;
};
142
/* Custom state subclass, for the "assumed-non-null" state
   where the assumption happens in a particular frame.  */

struct assumed_non_null_state : public allocation_state
{
  assumed_non_null_state (const char *name, unsigned id,
			  const frame_region *frame)
  : allocation_state (name, id, RS_ASSUMED_NON_NULL,
		      NULL, NULL),
    m_frame (frame)
  {
    gcc_assert (m_frame);
  }

  void dump_to_pp (pretty_printer *pp) const final override;

  /* The frame in which the non-NULL-ness was assumed; never NULL
     (checked by the ctor's assertion).  */
  const frame_region *m_frame;
};
161
/* An enum for choosing which wording to use in various diagnostics
   when describing deallocations.  */

enum wording
{
  /* Describe the deallocation as "freed".  */
  WORDING_FREED,

  /* Describe the deallocation as "deleted".  */
  WORDING_DELETED,

  /* Describe the deallocation as "deallocated".  */
  WORDING_DEALLOCATED,

  /* Describe the deallocation as "reallocated".  */
  WORDING_REALLOCATED
};
172
/* Base class representing a deallocation function,
   either a built-in one we know about, or one exposed via
   __attribute__((malloc(DEALLOCATOR))).  */

struct deallocator
{
  hashval_t hash () const;
  void dump_to_pp (pretty_printer *pp) const;

  /* Ordering for stable sorts of deallocators; only the sign of the
     result is meaningful.  */
  static int cmp (const deallocator *a, const deallocator *b);
  static int cmp_ptr_ptr (const void *, const void *);

  /* Name to use in diagnostics.  */
  const char *m_name;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* State for a value passed to one of the deallocators.  */
  state_machine::state_t m_freed;

protected:
  /* Only constructible by subclasses; registers the "freed" state
     with SM.  */
  deallocator (malloc_state_machine *sm,
	       const char *name,
	       enum wording wording);
};
198
/* Subclass representing a predefined deallocator.
   e.g. "delete []", without needing a specific FUNCTION_DECL
   ahead of time.  */

struct standard_deallocator : public deallocator
{
  standard_deallocator (malloc_state_machine *sm,
			const char *name,
			enum wording wording);
};
209
/* Subclass representing a user-defined deallocator
   via __attribute__((malloc(DEALLOCATOR))) given
   a specific FUNCTION_DECL.  */

struct custom_deallocator : public deallocator
{
  /* Use the FUNCTION_DECL's identifier as the diagnostic name.  */
  custom_deallocator (malloc_state_machine *sm,
		      tree deallocator_fndecl,
		      enum wording wording)
  : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
		 wording)
  {
  }
};
224
/* Base class representing a set of possible deallocators.
   Often this will be just a single deallocator, but some
   allocators have multiple valid deallocators (e.g. the result of
   "fopen" can be closed by either "fclose" or "freopen").  */

struct deallocator_set
{
  /* Registers the "unchecked" and "nonnull" states with SM.  */
  deallocator_set (malloc_state_machine *sm,
		   enum wording wording);
  virtual ~deallocator_set () {}

  /* Return true iff D is a member of this set.  */
  virtual bool contains_p (const deallocator *d) const = 0;

  /* Return the sole member if this set has exactly one, else NULL.  */
  virtual const deallocator *maybe_get_single () const = 0;

  virtual void dump_to_pp (pretty_printer *pp) const = 0;
  void dump () const;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* Pointers to states.
     These states are owned by the state_machine base class.  */

  /* State for an unchecked result from an allocator using this set.  */
  state_machine::state_t m_unchecked;

  /* State for a known non-NULL result from such an allocator.  */
  state_machine::state_t m_nonnull;
};
253
254 /* Subclass of deallocator_set representing a set of deallocators
255 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
256
257 struct custom_deallocator_set : public deallocator_set
258 {
259 typedef const auto_vec <const deallocator *> *key_t;
260
261 custom_deallocator_set (malloc_state_machine *sm,
262 const auto_vec <const deallocator *> *vec,
263 //const char *name,
264 //const char *dealloc_funcname,
265 //unsigned arg_idx,
266 enum wording wording);
267
268 bool contains_p (const deallocator *d) const final override;
269 const deallocator *maybe_get_single () const final override;
270 void dump_to_pp (pretty_printer *pp) const final override;
271
272 auto_vec <const deallocator *> m_deallocator_vec;
273 };
274
/* Subclass of deallocator_set representing a set of deallocators
   with a single standard_deallocator, e.g. "delete []".  */

struct standard_deallocator_set : public deallocator_set
{
  standard_deallocator_set (malloc_state_machine *sm,
			    const char *name,
			    enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  /* The single deallocator, owned by value.  */
  standard_deallocator m_deallocator;
};
290
/* Traits class for ensuring uniqueness of deallocator_sets within
   malloc_state_machine.  The key is the vector of member deallocators;
   NULL and (key_t)1 are reserved as the "empty" and "deleted" hash-table
   markers respectively.  */

struct deallocator_set_map_traits
{
  typedef custom_deallocator_set::key_t key_type;
  typedef custom_deallocator_set *value_type;
  typedef custom_deallocator_set *compare_type;

  /* Hash the vector by XORing member hashes; this is deliberately
     order-independent.  */
  static inline hashval_t hash (const key_type &k)
  {
    gcc_assert (k != NULL);
    gcc_assert (k != reinterpret_cast<key_type> (1));

    hashval_t result = 0;
    unsigned i;
    const deallocator *d;
    FOR_EACH_VEC_ELT (*k, i, d)
      result ^= d->hash ();
    return result;
  }

  /* Keys are equal iff the vectors are element-wise identical
     (same deallocator pointers in the same order).  */
  static inline bool equal_keys (const key_type &k1, const key_type &k2)
  {
    if (k1->length () != k2->length ())
      return false;

    for (unsigned i = 0; i < k1->length (); i++)
      if ((*k1)[i] != (*k2)[i])
	return false;

    return true;
  }
  template <typename T>
  static inline void remove (T &)
  {
    /* empty; the nodes are handled elsewhere.  */
  }
  template <typename T>
  static inline void mark_deleted (T &entry)
  {
    entry.m_key = reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline void mark_empty (T &entry)
  {
    entry.m_key = NULL;
  }
  template <typename T>
  static inline bool is_deleted (const T &entry)
  {
    return entry.m_key == reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline bool is_empty (const T &entry)
  {
    return entry.m_key == NULL;
  }
  /* NULL, not zeroed storage, denotes the empty slot.  */
  static const bool empty_zero_p = false;
};
350
/* A state machine for detecting misuses of the malloc/free API.

   See sm-malloc.dot for an overview (keep this in-sync with that file).  */

class malloc_state_machine : public state_machine
{
public:
  typedef allocation_state custom_data_t;

  malloc_state_machine (logger *logger);
  ~malloc_state_machine ();

  /* Create and register a new allocation_state (or
     assumed_non_null_state) with the given parameters.  */
  state_t
  add_state (const char *name, enum resource_state rs,
	     const deallocator_set *deallocators,
	     const deallocator *deallocator);

  bool inherited_state_p () const final override { return false; }

  /* Compute the state for SVAL when no transition has been recorded:
     the constant zero is known-NULL; pointers into code, globals, the
     stack or read-only data are known non-heap; everything else gets
     the start state.  */
  state_machine::state_t
  get_default_state (const svalue *sval) const final override
  {
    if (tree cst = sval->maybe_get_constant ())
      {
	if (zerop (cst))
	  return m_null;
      }
    if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
      {
	const region *reg = ptr->get_pointee ();
	switch (reg->get_memory_space ())
	  {
	  default:
	    break;
	  case MEMSPACE_CODE:
	  case MEMSPACE_GLOBALS:
	  case MEMSPACE_STACK:
	  case MEMSPACE_READONLY_DATA:
	    return m_non_heap;
	  }
      }
    return m_start;
  }

  bool on_stmt (sm_context *sm_ctxt,
		const supernode *node,
		const gimple *stmt) const final override;

  void on_phi (sm_context *sm_ctxt,
	       const supernode *node,
	       const gphi *phi,
	       tree rhs) const final override;

  void on_condition (sm_context *sm_ctxt,
		     const supernode *node,
		     const gimple *stmt,
		     const svalue *lhs,
		     enum tree_code op,
		     const svalue *rhs) const final override;

  void on_pop_frame (sm_state_map *smap,
		     const frame_region *) const final override;

  bool can_purge_p (state_t s) const final override;
  std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;

  bool reset_when_passed_to_unknown_fn_p (state_t s,
					  bool is_mutable) const final override;

  state_t
  maybe_get_merged_states_nonequal (state_t state_a,
				    state_t state_b) const final override;

  static bool unaffected_by_call_p (tree fndecl);

  void maybe_assume_non_null (sm_context *sm_ctxt,
			      tree ptr,
			      const gimple *stmt) const;

  void on_realloc_with_move (region_model *model,
			     sm_state_map *smap,
			     const svalue *old_ptr_sval,
			     const svalue *new_ptr_sval,
			     const extrinsic_state &ext_state) const;

  void transition_ptr_sval_non_null (region_model *model,
				     sm_state_map *smap,
				     const svalue *new_ptr_sval,
				     const extrinsic_state &ext_state) const;

  /* The standard allocation/deallocation pairings.  */
  standard_deallocator_set m_free;
  standard_deallocator_set m_scalar_delete;
  standard_deallocator_set m_vector_delete;

  standard_deallocator m_realloc;

  /* States that are independent of api.  */

  /* States for a pointer that's been unconditionally dereferenced
     in a particular stack frame.  */
  hash_map<const frame_region *, state_t> m_assumed_non_null;

  /* State for a pointer that's known to be NULL.  */
  state_t m_null;

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  state_t m_non_heap; // TODO: or should this be a different state machine?
  // or do we need child values etc?

  /* Stop state, for pointers we don't want to track any more.  */
  state_t m_stop;

private:
  const custom_deallocator_set *
  get_or_create_custom_deallocator_set (tree allocator_fndecl);
  custom_deallocator_set *
  maybe_create_custom_deallocator_set (tree allocator_fndecl);
  const deallocator *
  get_or_create_deallocator (tree deallocator_fndecl);

  state_t
  get_or_create_assumed_non_null_state_for_frame (const frame_region *frame);

  void
  maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
					   const supernode *node,
					   const gimple *stmt,
					   const assumed_non_null_state *,
					   tree ptr) const;

  void on_allocator_call (sm_context *sm_ctxt,
			  const gcall *call,
			  const deallocator_set *deallocators,
			  bool returns_nonnull = false) const;
  void handle_free_of_non_heap (sm_context *sm_ctxt,
				const supernode *node,
				const gcall *call,
				tree arg,
				const deallocator *d) const;
  void on_deallocator_call (sm_context *sm_ctxt,
			    const supernode *node,
			    const gcall *call,
			    const deallocator *d,
			    unsigned argno) const;
  void on_realloc_call (sm_context *sm_ctxt,
			const supernode *node,
			const gcall *call) const;
  void on_zero_assignment (sm_context *sm_ctxt,
			   const gimple *stmt,
			   tree lhs) const;

  /* A map for consolidating deallocators so that they are
     unique per deallocator FUNCTION_DECL.  */
  typedef hash_map<tree, deallocator *> deallocator_map_t;
  deallocator_map_t m_deallocator_map;

  /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *.  */
  typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
  deallocator_set_cache_t m_custom_deallocator_set_cache;

  /* A map for consolidating custom_deallocator_set instances.  */
  typedef hash_map<custom_deallocator_set::key_t,
		   custom_deallocator_set *,
		   deallocator_set_map_traits> custom_deallocator_set_map_t;
  custom_deallocator_set_map_t m_custom_deallocator_set_map;

  /* Record of dynamically-allocated objects, for cleanup.  */
  auto_vec <custom_deallocator_set *> m_dynamic_sets;
  auto_vec <custom_deallocator *> m_dynamic_deallocators;
};
522
/* struct deallocator.  */

/* deallocator's ctor.  Registers a "freed" state for this deallocator
   with the state machine SM.  */

deallocator::deallocator (malloc_state_machine *sm,
			  const char *name,
			  enum wording wording)
: m_name (name),
  m_wording (wording),
  m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
{
}
533
534 hashval_t
535 deallocator::hash () const
536 {
537 return (hashval_t)m_freed->get_id ();
538 }
539
540 void
541 deallocator::dump_to_pp (pretty_printer *pp) const
542 {
543 pp_printf (pp, "%qs", m_name);
544 }
545
546 int
547 deallocator::cmp (const deallocator *a, const deallocator *b)
548 {
549 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
550 }
551
552 int
553 deallocator::cmp_ptr_ptr (const void *a, const void *b)
554 {
555 return cmp (*(const deallocator * const *)a,
556 *(const deallocator * const *)b);
557 }
558
559
/* struct standard_deallocator : public deallocator.  */

/* standard_deallocator's ctor: simply forward to the base class.  */

standard_deallocator::standard_deallocator (malloc_state_machine *sm,
					    const char *name,
					    enum wording wording)
: deallocator (sm, name, wording)
{
}
568
/* struct deallocator_set.  */

/* deallocator_set's ctor.  Registers the "unchecked" and "nonnull"
   states for allocators using this set with the state machine SM.  */

deallocator_set::deallocator_set (malloc_state_machine *sm,
				  enum wording wording)
: m_wording (wording),
  m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
  m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
{
}
578
/* Dump a description of this deallocator_set to stderr,
   for use from the debugger.  */

DEBUG_FUNCTION void
deallocator_set::dump () const
{
  pretty_printer pp;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp);
  pp_newline (&pp);
  pp_flush (&pp);
}
591
592 /* struct custom_deallocator_set : public deallocator_set. */
593
594 custom_deallocator_set::
595 custom_deallocator_set (malloc_state_machine *sm,
596 const auto_vec <const deallocator *> *vec,
597 enum wording wording)
598 : deallocator_set (sm, wording),
599 m_deallocator_vec (vec->length ())
600 {
601 unsigned i;
602 const deallocator *d;
603 FOR_EACH_VEC_ELT (*vec, i, d)
604 m_deallocator_vec.safe_push (d);
605 }
606
607 bool
608 custom_deallocator_set::contains_p (const deallocator *d) const
609 {
610 unsigned i;
611 const deallocator *cd;
612 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
613 if (cd == d)
614 return true;
615 return false;
616 }
617
618 const deallocator *
619 custom_deallocator_set::maybe_get_single () const
620 {
621 if (m_deallocator_vec.length () == 1)
622 return m_deallocator_vec[0];
623 return NULL;
624 }
625
626 void
627 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
628 {
629 pp_character (pp, '{');
630 unsigned i;
631 const deallocator *d;
632 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
633 {
634 if (i > 0)
635 pp_string (pp, ", ");
636 d->dump_to_pp (pp);
637 }
638 pp_character (pp, '}');
639 }
640
/* struct standard_deallocator_set : public deallocator_set.  */

/* standard_deallocator_set's ctor, creating its single member
   deallocator in place.  */

standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
						    const char *name,
						    enum wording wording)
: deallocator_set (sm, wording),
  m_deallocator (sm, name, wording)
{
}
650
651 bool
652 standard_deallocator_set::contains_p (const deallocator *d) const
653 {
654 return d == &m_deallocator;
655 }
656
/* Return the single deallocator of this set; never NULL.  */

const deallocator *
standard_deallocator_set::maybe_get_single () const
{
  return &m_deallocator;
}
662
663 void
664 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
665 {
666 pp_character (pp, '{');
667 pp_string (pp, m_deallocator.m_name);
668 pp_character (pp, '}');
669 }
670
671 /* Return STATE cast to the custom state subclass, or NULL for the start state.
672 Everything should be an allocation_state apart from the start state. */
673
674 static const allocation_state *
675 dyn_cast_allocation_state (state_machine::state_t state)
676 {
677 if (state->get_id () == 0)
678 return NULL;
679 return static_cast <const allocation_state *> (state);
680 }
681
682 /* Return STATE cast to the custom state subclass, for a state that is
683 already known to not be the start state . */
684
685 static const allocation_state *
686 as_a_allocation_state (state_machine::state_t state)
687 {
688 gcc_assert (state->get_id () != 0);
689 return static_cast <const allocation_state *> (state);
690 }
691
692 /* Get the resource_state for STATE. */
693
694 static enum resource_state
695 get_rs (state_machine::state_t state)
696 {
697 if (const allocation_state *astate = dyn_cast_allocation_state (state))
698 return astate->m_rs;
699 else
700 return RS_START;
701 }
702
703 /* Return true if STATE is the start state. */
704
705 static bool
706 start_p (state_machine::state_t state)
707 {
708 return get_rs (state) == RS_START;
709 }
710
711 /* Return true if STATE is an unchecked result from an allocator. */
712
713 static bool
714 unchecked_p (state_machine::state_t state)
715 {
716 return get_rs (state) == RS_UNCHECKED;
717 }
718
719 /* Return true if STATE is a non-null result from an allocator. */
720
721 static bool
722 nonnull_p (state_machine::state_t state)
723 {
724 return get_rs (state) == RS_NONNULL;
725 }
726
727 /* Return true if STATE is a value that has been passed to a deallocator. */
728
729 static bool
730 freed_p (state_machine::state_t state)
731 {
732 return get_rs (state) == RS_FREED;
733 }
734
735 /* Return true if STATE is a value that has been assumed to be non-NULL. */
736
737 static bool
738 assumed_non_null_p (state_machine::state_t state)
739 {
740 return get_rs (state) == RS_ASSUMED_NON_NULL;
741 }
742
/* Class for diagnostics relating to malloc_state_machine,
   providing descriptions of state changes shared by the concrete
   diagnostic subclasses below.  */

class malloc_diagnostic : public pending_diagnostic
{
public:
  malloc_diagnostic (const malloc_state_machine &sm, tree arg)
  : m_sm (sm), m_arg (arg)
  {}

  /* Two malloc diagnostics of the same kind are equal iff they relate
     to the same tree.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
  }

  /* Describe the state changes common to these diagnostics: allocation,
     assuming an unchecked value is non-NULL, and a value becoming known
     to be NULL.  Returns an empty label for other transitions.  */
  label_text describe_state_change (const evdesc::state_change &change)
    override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& (unchecked_p (change.m_new_state) || nonnull_p (change.m_new_state)))
      // TODO: verify that it's the allocation stmt, not a copy
      return label_text::borrow ("allocated here");
    if (unchecked_p (change.m_old_state)
	&& nonnull_p (change.m_new_state))
      {
	if (change.m_expr)
	  return change.formatted_print ("assuming %qE is non-NULL",
					 change.m_expr);
	else
	  return change.formatted_print ("assuming %qs is non-NULL",
					 "<unknown>");
      }
    if (change.m_new_state == m_sm.m_null)
      {
	if (unchecked_p (change.m_old_state))
	  {
	    if (change.m_expr)
	      return change.formatted_print ("assuming %qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("assuming %qs is NULL",
					     "<unknown>");
	  }
	else
	  {
	    if (change.m_expr)
	      return change.formatted_print ("%qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("%qs is NULL",
					     "<unknown>");
	  }
      }

    return label_text ();
  }

  /* Categorize the transitions: entering "unchecked" from the start
     state means acquiring memory; entering a "freed" state means
     releasing it.  */
  diagnostic_event::meaning
  get_meaning_for_state_change (const evdesc::state_change &change)
    const final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
					diagnostic_event::NOUN_memory);
    if (freed_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_release,
					diagnostic_event::NOUN_memory);
    return diagnostic_event::meaning ();
  }

protected:
  /* The state machine this diagnostic was emitted by.  */
  const malloc_state_machine &m_sm;
  /* The pertinent pointer expression.  */
  tree m_arg;
};
817
/* Concrete subclass for reporting mismatching allocator/deallocator
   diagnostics (-Wanalyzer-mismatching-deallocation; CWE-762).  */

class mismatching_deallocation : public malloc_diagnostic
{
public:
  mismatching_deallocation (const malloc_state_machine &sm, tree arg,
			    const deallocator_set *expected_deallocators,
			    const deallocator *actual_dealloc)
  : malloc_diagnostic (sm, arg),
    m_expected_deallocators (expected_deallocators),
    m_actual_dealloc (actual_dealloc)
  {}

  const char *get_kind () const final override
  {
    return "mismatching_deallocation";
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_mismatching_deallocation;
  }

  /* Emit the warning, naming the expected deallocator when the set
     has exactly one member.  */
  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines.  */
    if (const deallocator *expected_dealloc
	  = m_expected_deallocators->maybe_get_single ())
      return ctxt.warn ("%qE should have been deallocated with %qs"
			" but was deallocated with %qs",
			m_arg, expected_dealloc->m_name,
			m_actual_dealloc->m_name);
    else
      return ctxt.warn ("%qs called on %qE returned from a mismatched"
			" allocation function",
			m_actual_dealloc->m_name, m_arg);
  }

  /* Record the allocation event, so that the final event can refer
     back to it.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state))
      {
	m_alloc_event = change.m_event_id;
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return change.formatted_print ("allocated here"
					 " (expects deallocation with %qs)",
					 expected_dealloc->m_name);
	else
	  return change.formatted_print ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  /* Describe the mismatched deallocation, referring back to the
     allocation event if it was seen.  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_alloc_event.known_p ())
      {
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocation at %@ expects deallocation with %qs",
	     m_actual_dealloc->m_name, &m_alloc_event,
	     expected_dealloc->m_name);
	else
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocated at %@",
	     m_actual_dealloc->m_name, &m_alloc_event);
      }
    return ev.formatted_print ("deallocated with %qs here",
			       m_actual_dealloc->m_name);
  }

private:
  /* The event at which the value was allocated, if known.  */
  diagnostic_event_id_t m_alloc_event;
  /* The set of deallocators the allocation expects.  */
  const deallocator_set *m_expected_deallocators;
  /* The deallocator that was actually used.  */
  const deallocator *m_actual_dealloc;
};
901
/* Concrete subclass for reporting double-free diagnostics
   (-Wanalyzer-double-free; CWE-415).  */

class double_free : public malloc_diagnostic
{
public:
  double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
  : malloc_diagnostic (sm, arg), m_funcname (funcname)
  {}

  const char *get_kind () const final override { return "double_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_double_free;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (415); /* CWE-415: Double Free.  */
    return ctxt.warn ("double-%qs of %qE", m_funcname, m_arg);
  }

  /* Record the first free event, so that the final event can refer
     back to it.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	m_first_free_event = change.m_event_id;
	return change.formatted_print ("first %qs here", m_funcname);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  /* Describe interprocedural propagation of the freed pointer.  */
  label_text describe_call_with_state (const evdesc::call_with_state &info)
    final override
  {
    if (freed_p (info.m_state))
      return info.formatted_print
	("passing freed pointer %qE in call to %qE from %qE",
	 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_first_free_event.known_p ())
      return ev.formatted_print ("second %qs here; first %qs was at %@",
				 m_funcname, m_funcname,
				 &m_first_free_event);
    return ev.formatted_print ("second %qs here", m_funcname);
  }

private:
  /* The event at which the value was first freed, if known.  */
  diagnostic_event_id_t m_first_free_event;
  /* The name of the deallocation function, for use in messages.  */
  const char *m_funcname;
};
959
/* Abstract subclass for describing possible bad uses of NULL.
   Responsible for describing the call that could return NULL.  */

class possible_null : public malloc_diagnostic
{
public:
  possible_null (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg)
  {}

  /* Record the event at which the unchecked value originated, so that
     subclasses' final events can refer back to it.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      {
	m_origin_of_unchecked_event = change.m_event_id;
	return label_text::borrow ("this call could return NULL");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  /* Describe the unchecked value being returned across a frame
     boundary.  */
  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (unchecked_p (info.m_state))
      return info.formatted_print ("possible return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

protected:
  /* The event of the call that could have returned NULL, if known.  */
  diagnostic_event_id_t m_origin_of_unchecked_event;
};
994
/* Concrete subclass for describing dereference of a possible NULL
   value (-Wanalyzer-possible-null-dereference; CWE-690).  */

class possible_null_deref : public possible_null
{
public:
  possible_null_deref (const malloc_state_machine &sm, tree arg)
  : possible_null (sm, arg)
  {}

  const char *get_kind () const final override { return "possible_null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_dereference;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    ctxt.add_cwe (690);
    return ctxt.warn ("dereference of possibly-NULL %qE", m_arg);
  }

  /* Refer back to the origin of the unchecked value when known.  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_origin_of_unchecked_event.known_p ())
      return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
				 ev.m_expr,
				 &m_origin_of_unchecked_event);
    else
      return ev.formatted_print ("%qE could be NULL", ev.m_expr);
  }

};
1030
1031 /* Return true if FNDECL is a C++ method. */
1032
1033 static bool
1034 method_p (tree fndecl)
1035 {
1036 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
1037 }
1038
1039 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
1040 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
1041 as called from cp_printer). */
1042
1043 static label_text
1044 describe_argument_index (tree fndecl, int arg_idx)
1045 {
1046 if (method_p (fndecl))
1047 if (arg_idx == 0)
1048 return label_text::borrow ("'this'");
1049 pretty_printer pp;
1050 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
1051 return label_text::take (xstrdup (pp_formatted_text (&pp)));
1052 }
1053
/* Subroutine for use by possible_null_arg::emit and null_arg::emit.
   Issue a note at FNDECL's location informing that the pertinent
   argument must be non-NULL.  */

static void
inform_nonnull_attribute (tree fndecl, int arg_idx)
{
  label_text arg_desc = describe_argument_index (fndecl, arg_idx);
  inform (DECL_SOURCE_LOCATION (fndecl),
	  "argument %s of %qD must be non-null",
	  arg_desc.get (), fndecl);
  /* Ideally we would use the location of the parm and underline the
     attribute also - but we don't have the location_t values at this point
     in the middle-end.
     For reference, the C and C++ FEs have get_fndecl_argument_location.  */
}
1069
/* Concrete subclass for describing passing a possibly-NULL value to a
   function marked with __attribute__((nonnull)).  */

class possible_null_arg : public possible_null
{
public:
  possible_null_arg (const malloc_state_machine &sm, tree arg,
		     tree fndecl, int arg_idx)
  : possible_null (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "possible_null_arg"; }

  /* Two instances are equal iff they refer to the same argument tree,
     the same callee, and the same argument index.  */
  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const possible_null_arg &sub_other
      = (const possible_null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_argument;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    ctxt.add_cwe (690);
    bool warned
      = ctxt.warn ("use of possibly-NULL %qE where non-null expected",
		   m_arg);
    /* Only add the note about the nonnull requirement if the warning
       itself was emitted.  */
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    /* Mention the event at which the unchecked value originated,
       if known.  */
    if (m_origin_of_unchecked_event.known_p ())
      result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr,
				   &m_origin_of_unchecked_event);
    else
      result = ev.formatted_print ("argument %s (%qE) could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;	/* The called function.  */
  int m_arg_idx;	/* 0-based index of the pertinent argument.  */
};
1132
/* Concrete subclass for describing a dereference of a NULL value.  */

class null_deref : public malloc_diagnostic
{
public:
  null_deref (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_dereference;
  }

  /* Stop exploring the path once this diagnostic has been saved:
     there is no point analyzing execution past a NULL dereference.  */
  bool terminate_path_p () const final override { return true; }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    ctxt.add_cwe (476);
    return ctxt.warn ("dereference of NULL %qE", m_arg);
  }

  /* Describe the interprocedural event in which NULL was returned
     from callee to caller.  */
  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (info.m_state == m_sm.m_null)
      return info.formatted_print ("return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
  }

  /* Implementation of pending_diagnostic::supercedes_p for
     null-deref.

     We want null-deref to supercede use-of-uninitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the null-deref.  */

  bool supercedes_p (const pending_diagnostic &other) const final override
  {
    if (other.use_of_uninit_p ())
      return true;

    return false;
  }
};
1186
/* Concrete subclass for describing passing a NULL value to a
   function marked with __attribute__((nonnull)).  */

class null_arg : public malloc_diagnostic
{
public:
  null_arg (const malloc_state_machine &sm, tree arg,
	    tree fndecl, int arg_idx)
  : malloc_diagnostic (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "null_arg"; }

  /* Two instances are equal iff they refer to the same argument tree,
     the same callee, and the same argument index.  */
  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const null_arg &sub_other
      = (const null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_argument;
  }

  /* Stop exploring the path once this diagnostic has been saved.  */
  bool terminate_path_p () const final override { return true; }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    ctxt.add_cwe (476);

    /* Don't print the argument expression when it is a literal zero,
       since quoting "0" adds nothing.  */
    bool warned;
    if (zerop (m_arg))
      warned = ctxt.warn ("use of NULL where non-null expected");
    else
      warned = ctxt.warn ("use of NULL %qE where non-null expected",
			  m_arg);
    /* Only add the note about the nonnull requirement if the warning
       itself was emitted.  */
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    if (zerop (ev.m_expr))
      result = ev.formatted_print ("argument %s NULL where non-null expected",
				   arg_desc.get ());
    else
      result = ev.formatted_print ("argument %s (%qE) NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;	/* The called function.  */
  int m_arg_idx;	/* 0-based index of the pertinent argument.  */
};
1253
/* Concrete subclass for describing a use of a pointer after it has
   been deallocated ("free", "delete", etc.).  */

class use_after_free : public malloc_diagnostic
{
public:
  use_after_free (const malloc_state_machine &sm, tree arg,
		  const deallocator *deallocator)
  : malloc_diagnostic (sm, arg),
    m_deallocator (deallocator)
  {
    gcc_assert (deallocator);
  }

  const char *get_kind () const final override { return "use_after_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_use_after_free;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* CWE-416: Use After Free.  */
    ctxt.add_cwe (416);
    return ctxt.warn ("use after %<%s%> of %qE",
		      m_deallocator->m_name, m_arg);
  }

  /* Describe the state change at which the deallocation happened,
     recording its event id so that the final event can refer back
     to it, and wording the label per the deallocator's kind.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	m_free_event = change.m_event_id;
	switch (m_deallocator->m_wording)
	  {
	  default:
	  case WORDING_REALLOCATED:
	    /* "realloc" wording isn't valid for this diagnostic.  */
	    gcc_unreachable ();
	  case WORDING_FREED:
	    return label_text::borrow ("freed here");
	  case WORDING_DELETED:
	    return label_text::borrow ("deleted here");
	  case WORDING_DEALLOCATED:
	    return label_text::borrow ("deallocated here");
	  }
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *funcname = m_deallocator->m_name;
    /* Refer back to the deallocation event, if it was seen.  */
    if (m_free_event.known_p ())
      switch (m_deallocator->m_wording)
	{
	default:
	case WORDING_REALLOCATED:
	  gcc_unreachable ();
	case WORDING_FREED:
	  return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DELETED:
	  return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DEALLOCATED:
	  return ev.formatted_print ("use after %<%s%> of %qE;"
				     " deallocated at %@",
				     funcname, ev.m_expr, &m_free_event);
	}
    else
      return ev.formatted_print ("use after %<%s%> of %qE",
				 funcname, ev.m_expr);
  }

  /* Implementation of pending_diagnostic::supercedes_p for
     use_after_free.

     We want use-after-free to supercede use-of-uninitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the use-after-free.
     (this is because we fully purge information about freed
     buffers when we free them to avoid state explosions, so
     that if they are accessed after the free, it looks like
     they are uninitialized).  */

  bool supercedes_p (const pending_diagnostic &other) const final override
  {
    if (other.use_of_uninit_p ())
      return true;

    return false;
  }

private:
  diagnostic_event_id_t m_free_event;	/* Where the deallocation happened.  */
  const deallocator *m_deallocator;	/* Which deallocator was used.  */
};
1350
/* Concrete subclass for describing a leak of an allocated buffer.  */

class malloc_leak : public malloc_diagnostic
{
public:
  malloc_leak (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "malloc_leak"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_malloc_leak;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    /* "CWE-401: Missing Release of Memory after Effective Lifetime".  */
    ctxt.add_cwe (401);
    /* m_arg can be NULL_TREE when no expression for the leaked value
       is available; fall back to "<unknown>".  */
    if (m_arg)
      return ctxt.warn ("leak of %qE", m_arg);
    else
      return ctxt.warn ("leak of %qs", "<unknown>");
  }

  /* Record the event at which the allocation happened, so that the
     final "leaks here" event can refer back to it.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state)
	|| (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
      {
	m_alloc_event = change.m_event_id;
	return label_text::borrow ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (ev.m_expr)
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qE leaks here; was allocated at %@",
				     ev.m_expr, &m_alloc_event);
	else
	  return ev.formatted_print ("%qE leaks here", ev.m_expr);
      }
    else
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qs leaks here; was allocated at %@",
				     "<unknown>", &m_alloc_event);
	else
	  return ev.formatted_print ("%qs leaks here", "<unknown>");
      }
  }

private:
  diagnostic_event_id_t m_alloc_event;	/* Where the allocation happened.  */
};
1409
/* Concrete subclass for describing a deallocation of a region that
   isn't heap-allocated (e.g. a stack address or a global).  */

class free_of_non_heap : public malloc_diagnostic
{
public:
  free_of_non_heap (const malloc_state_machine &sm, tree arg,
		    const region *freed_reg,
		    const char *funcname)
  : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
  {
  }

  const char *get_kind () const final override { return "free_of_non_heap"; }

  /* Two instances are equal iff they refer to the same argument tree
     and the same freed region.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const
    final override
  {
    const free_of_non_heap &other = (const free_of_non_heap &)base_other;
    return (same_tree_p (m_arg, other.m_arg)
	    && m_freed_reg == other.m_freed_reg);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_free_of_non_heap;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    ctxt.add_cwe (590); /* CWE-590: Free of Memory not on the Heap.  */
    /* Special-case the stack for a more precise message; everything
       else gets the generic "not on the heap" wording.  */
    switch (get_memory_space ())
      {
      default:
      case MEMSPACE_HEAP:
	/* This diagnostic shouldn't have been created for heap regions.  */
	gcc_unreachable ();
      case MEMSPACE_UNKNOWN:
      case MEMSPACE_CODE:
      case MEMSPACE_GLOBALS:
      case MEMSPACE_READONLY_DATA:
	return ctxt.warn ("%<%s%> of %qE which points to memory"
			  " not on the heap",
			  m_funcname, m_arg);
	break;
      case MEMSPACE_STACK:
	return ctxt.warn ("%<%s%> of %qE which points to memory"
			  " on the stack",
			  m_funcname, m_arg);
	break;
      }
  }

  label_text describe_state_change (const evdesc::state_change &)
    final override
  {
    return label_text::borrow ("pointer is from here");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("call to %qs here", m_funcname);
  }

  /* Highlight the region that was erroneously freed, if known.  */
  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_freed_reg)
      interest->add_region_creation (m_freed_reg);
  }

private:
  /* Return the memory space of the freed region, or MEMSPACE_UNKNOWN
     if no region was recorded.  */
  enum memory_space get_memory_space () const
  {
    if (m_freed_reg)
      return m_freed_reg->get_memory_space ();
    else
      return MEMSPACE_UNKNOWN;
  }

  const region *m_freed_reg;	/* The region that was freed (may be NULL).  */
  const char *m_funcname;	/* Name of the deallocation function called.  */
};
1489
1490 /* Concrete pending_diagnostic subclass for -Wanalyzer-deref-before-check. */
1491
1492 class deref_before_check : public malloc_diagnostic
1493 {
1494 public:
1495 deref_before_check (const malloc_state_machine &sm, tree arg)
1496 : malloc_diagnostic (sm, arg),
1497 m_deref_enode (NULL),
1498 m_deref_expr (NULL),
1499 m_check_enode (NULL)
1500 {
1501 gcc_assert (arg);
1502 }
1503
1504 const char *get_kind () const final override { return "deref_before_check"; }
1505
1506 int get_controlling_option () const final override
1507 {
1508 return OPT_Wanalyzer_deref_before_check;
1509 }
1510
1511 bool emit (diagnostic_emission_context &ctxt) final override
1512 {
1513 /* Don't emit the warning if we can't show where the deref
1514 and the check occur. */
1515 if (!m_deref_enode)
1516 return false;
1517 if (!m_check_enode)
1518 return false;
1519 /* Only emit the warning for intraprocedural cases. */
1520 const program_point &deref_point = m_deref_enode->get_point ();
1521 const program_point &check_point = m_check_enode->get_point ();
1522
1523 if (!program_point::effectively_intraprocedural_p (deref_point,
1524 check_point))
1525 return false;
1526
1527 /* Reject the warning if the check occurs within a macro defintion.
1528 This avoids false positives for such code as:
1529
1530 #define throw_error \
1531 do { \
1532 if (p) \
1533 cleanup (p); \
1534 return; \
1535 } while (0)
1536
1537 if (p->idx >= n)
1538 throw_error ();
1539
1540 where the usage of "throw_error" implicitly adds a check
1541 on 'p'.
1542
1543 We do warn when the check is in a macro expansion if we can get
1544 at the location of the condition and it is't part of the
1545 definition, so that we warn for checks such as:
1546 if (words[0][0] == '@')
1547 return;
1548 g_assert(words[0] != NULL); <--- here
1549 Unfortunately we don't have locations for individual gimple
1550 arguments, so in:
1551 g_assert (ptr);
1552 we merely have a gimple_cond
1553 if (p_2(D) == 0B)
1554 with no way of getting at the location of the condition separately
1555 from that of the gimple_cond (where the "if" is within the macro
1556 definition). We reject the warning for such cases.
1557
1558 We do warn when the *deref* occurs in a macro, since this can be
1559 a source of real bugs; see e.g. PR 77425. */
1560 location_t check_loc = m_check_enode->get_point ().get_location ();
1561 if (linemap_location_from_macro_definition_p (line_table, check_loc))
1562 return false;
1563
1564 /* Reject if m_deref_expr is sufficiently different from m_arg
1565 for cases where the dereference is spelled differently from
1566 the check, which is probably two different ways to get the
1567 same svalue, and thus not worth reporting. */
1568 if (!m_deref_expr)
1569 return false;
1570 if (!sufficiently_similar_p (m_deref_expr, m_arg))
1571 return false;
1572
1573 /* Reject the warning if the deref's BB doesn't dominate that
1574 of the check, so that we don't warn e.g. for shared cleanup
1575 code that checks a pointer for NULL, when that code is sometimes
1576 used before a deref and sometimes after.
1577 Using the dominance code requires setting cfun. */
1578 auto_cfun sentinel (m_deref_enode->get_function ());
1579 calculate_dominance_info (CDI_DOMINATORS);
1580 if (!dominated_by_p (CDI_DOMINATORS,
1581 m_check_enode->get_supernode ()->m_bb,
1582 m_deref_enode->get_supernode ()->m_bb))
1583 return false;
1584
1585 return ctxt.warn ("check of %qE for NULL after already"
1586 " dereferencing it",
1587 m_arg);
1588 }
1589
1590 label_text describe_state_change (const evdesc::state_change &change)
1591 final override
1592 {
1593 if (change.m_old_state == m_sm.get_start_state ()
1594 && assumed_non_null_p (change.m_new_state))
1595 {
1596 m_first_deref_event = change.m_event_id;
1597 m_deref_enode = change.m_event.get_exploded_node ();
1598 m_deref_expr = change.m_expr;
1599 return change.formatted_print ("pointer %qE is dereferenced here",
1600 m_arg);
1601 }
1602 return malloc_diagnostic::describe_state_change (change);
1603 }
1604
1605 label_text describe_final_event (const evdesc::final_event &ev) final override
1606 {
1607 m_check_enode = ev.m_event.get_exploded_node ();
1608 if (m_first_deref_event.known_p ())
1609 return ev.formatted_print ("pointer %qE is checked for NULL here but"
1610 " it was already dereferenced at %@",
1611 m_arg, &m_first_deref_event);
1612 else
1613 return ev.formatted_print ("pointer %qE is checked for NULL here but"
1614 " it was already dereferenced",
1615 m_arg);
1616 }
1617
1618 private:
1619 static bool sufficiently_similar_p (tree expr_a, tree expr_b)
1620 {
1621 pretty_printer *pp_a = global_dc->printer->clone ();
1622 pretty_printer *pp_b = global_dc->printer->clone ();
1623 pp_printf (pp_a, "%qE", expr_a);
1624 pp_printf (pp_b, "%qE", expr_b);
1625 bool result = (strcmp (pp_formatted_text (pp_a), pp_formatted_text (pp_b))
1626 == 0);
1627 delete pp_a;
1628 delete pp_b;
1629 return result;
1630 }
1631
1632 diagnostic_event_id_t m_first_deref_event;
1633 const exploded_node *m_deref_enode;
1634 tree m_deref_expr;
1635 const exploded_node *m_check_enode;
1636 };
1637
1638 /* struct allocation_state : public state_machine::state. */
1639
1640 /* Implementation of state_machine::state::dump_to_pp vfunc
1641 for allocation_state: append the API that this allocation is
1642 associated with. */
1643
1644 void
1645 allocation_state::dump_to_pp (pretty_printer *pp) const
1646 {
1647 state_machine::state::dump_to_pp (pp);
1648 if (m_deallocators)
1649 {
1650 pp_string (pp, " (");
1651 m_deallocators->dump_to_pp (pp);
1652 pp_character (pp, ')');
1653 }
1654 }
1655
/* Given a allocation_state for a deallocator_set, get the "nonnull" state
   for the corresponding allocator(s).  */

const allocation_state *
allocation_state::get_nonnull () const
{
  /* Only valid for states that have an associated deallocator_set.  */
  gcc_assert (m_deallocators);
  return as_a_allocation_state (m_deallocators->m_nonnull);
}
1665
/* struct assumed_non_null_state : public allocation_state.  */

/* Implementation of state_machine::state::dump_to_pp vfunc for
   assumed_non_null_state: append " (in FRAME)" identifying the frame
   in which the non-null assumption was made.  */

void
assumed_non_null_state::dump_to_pp (pretty_printer *pp) const
{
  allocation_state::dump_to_pp (pp);
  pp_string (pp, " (in ");
  m_frame->dump_to_pp (pp, true);
  pp_character (pp, ')');
}
1676
/* malloc_state_machine's ctor.  */

malloc_state_machine::malloc_state_machine (logger *logger)
: state_machine ("malloc", logger),
  m_free (this, "free", WORDING_FREED),
  m_scalar_delete (this, "delete", WORDING_DELETED),
  m_vector_delete (this, "delete[]", WORDING_DELETED),
  m_realloc (this, "realloc", WORDING_REALLOCATED)
{
  /* Verify the invariant that the "start" state is state 0.  */
  gcc_assert (m_start->get_id () == 0);
  /* States with no associated deallocator_set/deallocator.  */
  m_null = add_state ("null", RS_FREED, NULL, NULL);
  m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
  m_stop = add_state ("stop", RS_STOP, NULL, NULL);
}
1691
/* malloc_state_machine's dtor: clean up the dynamically-allocated
   custom deallocator sets and deallocators owned by this instance.  */

malloc_state_machine::~malloc_state_machine ()
{
  unsigned i;
  /* Sets created by maybe_create_custom_deallocator_set.  */
  custom_deallocator_set *set;
  FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
    delete set;
  /* Deallocators created by get_or_create_deallocator.  */
  custom_deallocator *d;
  FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
    delete d;
}
1702
1703 state_machine::state_t
1704 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1705 const deallocator_set *deallocators,
1706 const deallocator *deallocator)
1707 {
1708 return add_custom_state (new allocation_state (name, alloc_state_id (),
1709 rs, deallocators,
1710 deallocator));
1711 }
1712
1713 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1714 return a custom_deallocator_set for them, consolidating them
1715 to ensure uniqueness of the sets.
1716
1717 Return NULL if it has no such attributes. */
1718
1719 const custom_deallocator_set *
1720 malloc_state_machine::
1721 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1722 {
1723 /* Early rejection of decls without attributes. */
1724 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1725 if (!attrs)
1726 return NULL;
1727
1728 /* Otherwise, call maybe_create_custom_deallocator_set,
1729 memoizing the result. */
1730 if (custom_deallocator_set **slot
1731 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1732 return *slot;
1733 custom_deallocator_set *set
1734 = maybe_create_custom_deallocator_set (allocator_fndecl);
1735 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1736 return set;
1737 }
1738
/* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
   look for any "__attribute__((malloc(FOO)))" and return a
   custom_deallocator_set for them, consolidating them
   to ensure uniqueness of the sets.

   Return NULL if it has no such attributes.

   Subroutine of get_or_create_custom_deallocator_set which
   memoizes the result.  */

custom_deallocator_set *
malloc_state_machine::
maybe_create_custom_deallocator_set (tree allocator_fndecl)
{
  tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
  gcc_assert (attrs);

  /* Look for instances of __attribute__((malloc(FOO))).  */
  auto_vec<const deallocator *> deallocator_vec;
  for (tree allocs = attrs;
       (allocs = lookup_attribute ("malloc", allocs));
       allocs = TREE_CHAIN (allocs))
    {
      tree args = TREE_VALUE (allocs);
      if (!args)
	continue;
      /* A "malloc" attribute with an argument names a deallocator.  */
      if (TREE_VALUE (args))
	{
	  const deallocator *d
	    = get_or_create_deallocator (TREE_VALUE (args));
	  deallocator_vec.safe_push (d);
	}
    }

  /* If there weren't any deallocators, bail.  */
  if (deallocator_vec.length () == 0)
    return NULL;

  /* Consolidate, so that we reuse existing deallocator_set
     instances.  */
  deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
  custom_deallocator_set **slot
    = m_custom_deallocator_set_map.get (&deallocator_vec);
  if (slot)
    return *slot;
  custom_deallocator_set *set
    = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
  /* Key the map by the set's own copy of the vec, which (unlike the
     local above) stays live; the set itself is owned via
     m_dynamic_sets and deleted in the dtor.  */
  m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
  m_dynamic_sets.safe_push (set);
  return set;
}
1790
/* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary.  */

const deallocator *
malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
{
  /* Check the memoization map first.  */
  deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
  if (slot)
    return *slot;

  /* Reuse "free".  */
  deallocator *d;
  if (is_named_call_p (deallocator_fndecl, "free")
      || is_std_named_call_p (deallocator_fndecl, "free")
      || is_named_call_p (deallocator_fndecl, "__builtin_free"))
    d = &m_free.m_deallocator;
  else
    {
      /* Otherwise create a new custom deallocator, owned via
	 m_dynamic_deallocators and deleted in the dtor.  */
      custom_deallocator *cd
	= new custom_deallocator (this, deallocator_fndecl,
				  WORDING_DEALLOCATED);
      m_dynamic_deallocators.safe_push (cd);
      d = cd;
    }
  m_deallocator_map.put (deallocator_fndecl, d);
  return d;
}
1817
1818 /* Get the "assumed-non-null" state for assumptions made within FRAME,
1819 creating it if necessary. */
1820
1821 state_machine::state_t
1822 malloc_state_machine::
1823 get_or_create_assumed_non_null_state_for_frame (const frame_region *frame)
1824 {
1825 if (state_t *slot = m_assumed_non_null.get (frame))
1826 return *slot;
1827 state_machine::state *new_state
1828 = new assumed_non_null_state ("assumed-non-null", alloc_state_id (), frame);
1829 add_custom_state (new_state);
1830 m_assumed_non_null.put (frame, new_state);
1831 return new_state;
1832 }
1833
/* Try to identify the function declaration either by name or as a known malloc
   builtin.  Return true if FNDECL (as called by CALL) is an allocator whose
   result is freed with "free".  */

static bool
known_allocator_p (const_tree fndecl, const gcall *call)
{
  /* Either it is a function we know by name and number of arguments... */
  if (is_named_call_p (fndecl, "malloc", call, 1)
      || is_named_call_p (fndecl, "calloc", call, 2)
      || is_std_named_call_p (fndecl, "malloc", call, 1)
      || is_std_named_call_p (fndecl, "calloc", call, 2)
      || is_named_call_p (fndecl, "strdup", call, 1)
      || is_named_call_p (fndecl, "strndup", call, 2))
    return true;

  /* ... or it is a builtin allocator that allocates objects freed with
     __builtin_free.  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_MALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
	return true;
      default:
	break;
      }

  return false;
}
1865
/* If PTR's nullness is not known, transition it to the "assumed-non-null"
   state for the current frame.  */

void
malloc_state_machine::maybe_assume_non_null (sm_context *sm_ctxt,
					     tree ptr,
					     const gimple *stmt) const
{
  const region_model *old_model = sm_ctxt->get_old_region_model ();
  if (!old_model)
    return;

  /* Ask the region model whether PTR != NULL is already decided.  */
  tree null_ptr_cst = build_int_cst (TREE_TYPE (ptr), 0);
  tristate known_non_null
    = old_model->eval_condition (ptr, NE_EXPR, null_ptr_cst, NULL);
  if (known_non_null.is_unknown ())
    {
      /* Cast away const-ness for cache-like operations.  */
      malloc_state_machine *mut_this
	= const_cast <malloc_state_machine *> (this);
      /* Record the assumption against the current frame, so that it
	 is scoped to this activation.  */
      state_t next_state
	= mut_this->get_or_create_assumed_non_null_state_for_frame
	    (old_model->get_current_frame ());
      sm_ctxt->set_next_state (stmt, ptr, next_state);
    }
}
1892
1893 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1894
1895 bool
1896 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1897 const supernode *node,
1898 const gimple *stmt) const
1899 {
1900 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1901 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1902 {
1903 if (known_allocator_p (callee_fndecl, call))
1904 {
1905 on_allocator_call (sm_ctxt, call, &m_free);
1906 return true;
1907 }
1908
1909 if (!is_placement_new_p (call))
1910 {
1911 bool returns_nonnull = !TREE_NOTHROW (callee_fndecl)
1912 && flag_exceptions;
1913 if (is_named_call_p (callee_fndecl, "operator new"))
1914 on_allocator_call (sm_ctxt, call,
1915 &m_scalar_delete, returns_nonnull);
1916 else if (is_named_call_p (callee_fndecl, "operator new []"))
1917 on_allocator_call (sm_ctxt, call,
1918 &m_vector_delete, returns_nonnull);
1919 }
1920
1921 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1922 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1923 {
1924 on_deallocator_call (sm_ctxt, node, call,
1925 &m_scalar_delete.m_deallocator, 0);
1926 return true;
1927 }
1928 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1929 {
1930 on_deallocator_call (sm_ctxt, node, call,
1931 &m_vector_delete.m_deallocator, 0);
1932 return true;
1933 }
1934
1935 if (is_named_call_p (callee_fndecl, "alloca", call, 1)
1936 || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
1937 {
1938 tree lhs = gimple_call_lhs (call);
1939 if (lhs)
1940 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1941 return true;
1942 }
1943
1944 if (is_named_call_p (callee_fndecl, "free", call, 1)
1945 || is_std_named_call_p (callee_fndecl, "free", call, 1)
1946 || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
1947 {
1948 on_deallocator_call (sm_ctxt, node, call,
1949 &m_free.m_deallocator, 0);
1950 return true;
1951 }
1952
1953 if (is_named_call_p (callee_fndecl, "realloc", call, 2)
1954 || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
1955 {
1956 on_realloc_call (sm_ctxt, node, call);
1957 return true;
1958 }
1959
1960 if (unaffected_by_call_p (callee_fndecl))
1961 return true;
1962
1963 /* Cast away const-ness for cache-like operations. */
1964 malloc_state_machine *mutable_this
1965 = const_cast <malloc_state_machine *> (this);
1966
1967 /* Handle interesting attributes of the callee_fndecl,
1968 or prioritize those of the builtin that callee_fndecl is expected
1969 to be.
1970 Might want this to be controlled by a flag. */
1971 {
1972 tree fndecl = callee_fndecl;
1973 /* If call is recognized as a builtin known_function, use that
1974 builtin's function_decl. */
1975 if (const region_model *old_model = sm_ctxt->get_old_region_model ())
1976 if (const builtin_known_function *builtin_kf
1977 = old_model->get_builtin_kf (call))
1978 fndecl = builtin_kf->builtin_decl ();
1979
1980 /* Handle "__attribute__((malloc(FOO)))". */
1981 if (const deallocator_set *deallocators
1982 = mutable_this->get_or_create_custom_deallocator_set
1983 (fndecl))
1984 {
1985 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (fndecl));
1986 bool returns_nonnull
1987 = lookup_attribute ("returns_nonnull", attrs);
1988 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
1989 }
1990
1991 {
1992 /* Handle "__attribute__((nonnull))". */
1993 tree fntype = TREE_TYPE (fndecl);
1994 bitmap nonnull_args = get_nonnull_args (fntype);
1995 if (nonnull_args)
1996 {
1997 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1998 {
1999 tree arg = gimple_call_arg (stmt, i);
2000 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
2001 continue;
2002 /* If we have a nonnull-args, and either all pointers, or
2003 just the specified pointers. */
2004 if (bitmap_empty_p (nonnull_args)
2005 || bitmap_bit_p (nonnull_args, i))
2006 {
2007 state_t state = sm_ctxt->get_state (stmt, arg);
2008 /* Can't use a switch as the states are non-const. */
2009 /* Do use the fndecl that caused the warning so that the
2010 misused attributes are printed and the user not
2011 confused. */
2012 if (unchecked_p (state))
2013 {
2014 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2015 sm_ctxt->warn (node, stmt, arg,
2016 make_unique<possible_null_arg>
2017 (*this, diag_arg, fndecl, i));
2018 const allocation_state *astate
2019 = as_a_allocation_state (state);
2020 sm_ctxt->set_next_state (stmt, arg,
2021 astate->get_nonnull ());
2022 }
2023 else if (state == m_null)
2024 {
2025 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2026 sm_ctxt->warn (node, stmt, arg,
2027 make_unique<null_arg>
2028 (*this, diag_arg, fndecl, i));
2029 sm_ctxt->set_next_state (stmt, arg, m_stop);
2030 }
2031 else if (state == m_start)
2032 maybe_assume_non_null (sm_ctxt, arg, stmt);
2033 }
2034 }
2035 BITMAP_FREE (nonnull_args);
2036 }
2037 }
2038
2039 /* Check for this after nonnull, so that if we have both
2040 then we transition to "freed", rather than "checked". */
2041 unsigned dealloc_argno = fndecl_dealloc_argno (fndecl);
2042 if (dealloc_argno != UINT_MAX)
2043 {
2044 const deallocator *d
2045 = mutable_this->get_or_create_deallocator (fndecl);
2046 on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
2047 }
2048 }
2049 }
2050
2051 /* Look for pointers explicitly being compared against zero
2052 that are in state assumed_non_null i.e. we already defererenced
2053 them.
2054 We have to do this check here, rather than in on_condition
2055 because we add a constraint that the pointer is non-null when
2056 dereferencing it, and this makes the apply_constraints_for_gcond
2057 find known-true and known-false conditions; on_condition is only
2058 called when adding new constraints. */
2059 if (const gcond *cond_stmt = dyn_cast <const gcond *> (stmt))
2060 {
2061 enum tree_code op = gimple_cond_code (cond_stmt);
2062 if (op == EQ_EXPR || op == NE_EXPR)
2063 {
2064 tree lhs = gimple_cond_lhs (cond_stmt);
2065 tree rhs = gimple_cond_rhs (cond_stmt);
2066 if (any_pointer_p (lhs)
2067 && any_pointer_p (rhs)
2068 && zerop (rhs))
2069 {
2070 state_t state = sm_ctxt->get_state (stmt, lhs);
2071 if (assumed_non_null_p (state))
2072 maybe_complain_about_deref_before_check
2073 (sm_ctxt, node,
2074 stmt,
2075 (const assumed_non_null_state *)state,
2076 lhs);
2077 }
2078 }
2079 }
2080
2081 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
2082 if (any_pointer_p (lhs))
2083 on_zero_assignment (sm_ctxt, stmt,lhs);
2084
2085 /* Handle dereferences. */
2086 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
2087 {
2088 tree op = gimple_op (stmt, i);
2089 if (!op)
2090 continue;
2091 if (TREE_CODE (op) == COMPONENT_REF)
2092 op = TREE_OPERAND (op, 0);
2093
2094 if (TREE_CODE (op) == MEM_REF)
2095 {
2096 tree arg = TREE_OPERAND (op, 0);
2097
2098 state_t state = sm_ctxt->get_state (stmt, arg);
2099 if (state == m_start)
2100 maybe_assume_non_null (sm_ctxt, arg, stmt);
2101 else if (unchecked_p (state))
2102 {
2103 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2104 sm_ctxt->warn (node, stmt, arg,
2105 make_unique<possible_null_deref> (*this,
2106 diag_arg));
2107 const allocation_state *astate = as_a_allocation_state (state);
2108 sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
2109 }
2110 else if (state == m_null)
2111 {
2112 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2113 sm_ctxt->warn (node, stmt, arg,
2114 make_unique<null_deref> (*this, diag_arg));
2115 sm_ctxt->set_next_state (stmt, arg, m_stop);
2116 }
2117 else if (freed_p (state))
2118 {
2119 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2120 const allocation_state *astate = as_a_allocation_state (state);
2121 sm_ctxt->warn (node, stmt, arg,
2122 make_unique<use_after_free>
2123 (*this, diag_arg, astate->m_deallocator));
2124 sm_ctxt->set_next_state (stmt, arg, m_stop);
2125 }
2126 }
2127 }
2128 return false;
2129 }
2130
/* Given a check against null of PTR in assumed-non-null state STATE,
   potentially add a deref_before_check warning to SM_CTXT.  */

void
malloc_state_machine::
maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
					 const supernode *node,
					 const gimple *stmt,
					 const assumed_non_null_state *state,
					 tree ptr) const
{
  /* We need the region model to compare stack frames; bail out if it
     isn't available.  */
  const region_model *model = sm_ctxt->get_old_region_model ();
  if (!model)
    return;

  /* Don't complain if the current frame (where the check is occurring) is
     deeper than the frame in which the "not null" assumption was made.
     This suppresses false positives for cases like:

	void foo (struct s *p)
	{
	  int val = p->some_field; // deref here
	  shared_helper (p);
	}

     where "shared_helper" has:

	void shared_helper (struct s *p)
	{
	  if (!p) // check here
	    return;
	  // etc
	}

     since the check in "shared_helper" is OK.  */
  const frame_region *checked_in_frame = model->get_current_frame ();
  const frame_region *assumed_nonnull_in_frame = state->m_frame;
  /* Frame indices increase with call depth, so a greater index here means
     the check is in a callee of the frame where the deref happened.  */
  if (checked_in_frame->get_index () > assumed_nonnull_in_frame->get_index ())
    return;

  /* Only warn if we can express the pointer as a tree for the user;
     either way, transition to the "stop" state so that we don't issue
     further reports about PTR.  */
  tree diag_ptr = sm_ctxt->get_diagnostic_tree (ptr);
  if (diag_ptr)
    sm_ctxt->warn
      (node, stmt, ptr,
       make_unique<deref_before_check> (*this, diag_ptr));
  sm_ctxt->set_next_state (stmt, ptr, m_stop);
}
2178
2179 /* Handle a call to an allocator.
2180 RETURNS_NONNULL is true if CALL is to a fndecl known to have
2181 __attribute__((returns_nonnull)). */
2182
2183 void
2184 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
2185 const gcall *call,
2186 const deallocator_set *deallocators,
2187 bool returns_nonnull) const
2188 {
2189 tree lhs = gimple_call_lhs (call);
2190 if (lhs)
2191 {
2192 if (sm_ctxt->get_state (call, lhs) == m_start)
2193 sm_ctxt->set_next_state (call, lhs,
2194 (returns_nonnull
2195 ? deallocators->m_nonnull
2196 : deallocators->m_unchecked));
2197 }
2198 else
2199 {
2200 /* TODO: report leak. */
2201 }
2202 }
2203
2204 /* Handle deallocations of non-heap pointers.
2205 non-heap -> stop, with warning. */
2206
2207 void
2208 malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
2209 const supernode *node,
2210 const gcall *call,
2211 tree arg,
2212 const deallocator *d) const
2213 {
2214 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2215 const region *freed_reg = NULL;
2216 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
2217 {
2218 const region_model *old_model = old_state->m_region_model;
2219 const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
2220 freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
2221 }
2222 sm_ctxt->warn (node, call, arg,
2223 make_unique<free_of_non_heap>
2224 (*this, diag_arg, freed_reg, d->m_name));
2225 sm_ctxt->set_next_state (call, arg, m_stop);
2226 }
2227
/* Handle a call to deallocator D at CALL within NODE, where the pointer
   being deallocated is argument ARGNO of the call.
   Implement the state transitions for deallocation, warning about
   mismatched allocator/deallocator pairs, double-frees, and
   deallocation of non-heap pointers.  */

void
malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
					   const supernode *node,
					   const gcall *call,
					   const deallocator *d,
					   unsigned argno) const
{
  /* Bail out if the call doesn't have the expected argument.  */
  if (argno >= gimple_call_num_args (call))
    return;
  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt->get_state (call, arg);

  /* start/assumed_non_null/unchecked/nonnull -> freed.  */
  if (state == m_start || assumed_non_null_p (state))
    sm_ctxt->set_next_state (call, arg, d->m_freed);
  else if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      if (!astate->m_deallocators->contains_p (d))
	{
	  /* Wrong allocator.  */
	  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	  sm_ctxt->warn (node, call, arg,
			 make_unique<mismatching_deallocation>
			   (*this, diag_arg,
			    astate->m_deallocators,
			    d));
	}
      /* Transition to "freed" even after a mismatch, so that followup
	 uses are reported against the freed state.  */
      sm_ctxt->set_next_state (call, arg, d->m_freed);
    }

  /* Keep state "null" as-is, rather than transitioning to "freed";
     we don't want to complain about double-free of NULL.  */
  else if (state == d->m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
      sm_ctxt->warn (node, call, arg,
		     make_unique<double_free> (*this, diag_arg, d->m_name));
      sm_ctxt->set_next_state (call, arg, m_stop);
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
    }
}
2277
/* Handle a call to "realloc".
   Check for free of non-heap or mismatching allocators,
   transitioning to the "stop" state for such cases.

   Otherwise, kf_realloc::impl_call_post will later
   get called (which will handle other sm-state transitions
   when the state is bifurcated).  */

void
malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
				       const supernode *node,
				       const gcall *call) const
{
  /* The pointer being reallocated is the first argument of the call.  */
  const unsigned argno = 0;
  const deallocator *d = &m_realloc;

  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt->get_state (call, arg);

  if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      /* realloc is only valid on pointers that "free" could release.  */
      if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
	{
	  /* Wrong allocator.  */
	  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	  sm_ctxt->warn (node, call, arg,
			 make_unique<mismatching_deallocation>
			   (*this, diag_arg,
			    astate->m_deallocators, d));
	  sm_ctxt->set_next_state (call, arg, m_stop);
	  /* Terminate the path: the sm-state bifurcation done for
	     realloc elsewhere isn't meaningful after this error.  */
	  if (path_context *path_ctxt = sm_ctxt->get_path_context ())
	    path_ctxt->terminate_path ();
	}
    }
  else if (state == m_free.m_deallocator.m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
      sm_ctxt->warn (node, call, arg,
		     make_unique<double_free> (*this, diag_arg, "free"));
      sm_ctxt->set_next_state (call, arg, m_stop);
      if (path_context *path_ctxt = sm_ctxt->get_path_context ())
	path_ctxt->terminate_path ();
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
      if (path_context *path_ctxt = sm_ctxt->get_path_context ())
	path_ctxt->terminate_path ();
    }
}
2333
2334 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
2335
2336 void
2337 malloc_state_machine::on_phi (sm_context *sm_ctxt,
2338 const supernode *node ATTRIBUTE_UNUSED,
2339 const gphi *phi,
2340 tree rhs) const
2341 {
2342 if (zerop (rhs))
2343 {
2344 tree lhs = gimple_phi_result (phi);
2345 on_zero_assignment (sm_ctxt, phi, lhs);
2346 }
2347 }
2348
2349 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
2350 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
2351
2352 void
2353 malloc_state_machine::on_condition (sm_context *sm_ctxt,
2354 const supernode *node ATTRIBUTE_UNUSED,
2355 const gimple *stmt,
2356 const svalue *lhs,
2357 enum tree_code op,
2358 const svalue *rhs) const
2359 {
2360 if (!rhs->all_zeroes_p ())
2361 return;
2362
2363 if (!any_pointer_p (lhs))
2364 return;
2365 if (!any_pointer_p (rhs))
2366 return;
2367
2368 if (op == NE_EXPR)
2369 {
2370 log ("got 'ARG != 0' match");
2371 state_t s = sm_ctxt->get_state (stmt, lhs);
2372 if (unchecked_p (s))
2373 {
2374 const allocation_state *astate = as_a_allocation_state (s);
2375 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
2376 }
2377 }
2378 else if (op == EQ_EXPR)
2379 {
2380 log ("got 'ARG == 0' match");
2381 state_t s = sm_ctxt->get_state (stmt, lhs);
2382 if (unchecked_p (s))
2383 sm_ctxt->set_next_state (stmt, lhs, m_null);
2384 }
2385 }
2386
2387 /* Implementation of state_machine::on_pop_frame vfunc for malloc_state_machine.
2388 Clear any "assumed-non-null" state where the assumption happened in
2389 FRAME_REG. */
2390
2391 void
2392 malloc_state_machine::on_pop_frame (sm_state_map *smap,
2393 const frame_region *frame_reg) const
2394 {
2395 hash_set<const svalue *> svals_to_clear;
2396 for (auto kv : *smap)
2397 {
2398 const svalue *sval = kv.first;
2399 state_t state = kv.second.m_state;
2400 if (assumed_non_null_p (state))
2401 {
2402 const assumed_non_null_state *assumed_state
2403 = (const assumed_non_null_state *)state;
2404 if (frame_reg == assumed_state->m_frame)
2405 svals_to_clear.add (sval);
2406 }
2407 }
2408 for (auto sval : svals_to_clear)
2409 smap->clear_any_state (sval);
2410 }
2411
2412 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
2413 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
2414 (to avoid false leak reports). */
2415
2416 bool
2417 malloc_state_machine::can_purge_p (state_t s) const
2418 {
2419 enum resource_state rs = get_rs (s);
2420 return rs != RS_UNCHECKED && rs != RS_NONNULL;
2421 }
2422
/* Implementation of state_machine::on_leak vfunc for malloc_state_machine
   (for complaining about leaks of pointers in state 'unchecked' and
   'nonnull').
   VAR is the leaked pointer; the returned diagnostic takes ownership of
   reporting it.  */

std::unique_ptr<pending_diagnostic>
malloc_state_machine::on_leak (tree var) const
{
  return make_unique<malloc_leak> (*this, var);
}
2432
2433 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
2434 for malloc_state_machine. */
2435
2436 bool
2437 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
2438 bool is_mutable) const
2439 {
2440 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
2441 unknown fn. */
2442 if (s == m_non_heap)
2443 return false;
2444
2445 /* Otherwise, pointers passed as non-const can be freed. */
2446 return is_mutable;
2447 }
2448
2449 /* Implementation of state_machine::maybe_get_merged_states_nonequal vfunc
2450 for malloc_state_machine.
2451
2452 Support discarding "assumed-non-null" states when merging with
2453 start state. */
2454
2455 state_machine::state_t
2456 malloc_state_machine::maybe_get_merged_states_nonequal (state_t state_a,
2457 state_t state_b) const
2458 {
2459 if (assumed_non_null_p (state_a) && state_b == m_start)
2460 return m_start;
2461 if (state_a == m_start && assumed_non_null_p (state_b))
2462 return m_start;
2463 return NULL;
2464 }
2465
2466 /* Return true if calls to FNDECL are known to not affect this sm-state. */
2467
2468 bool
2469 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2470 {
2471 /* A set of functions that are known to not affect allocation
2472 status, even if we haven't fully modelled the rest of their
2473 behavior yet. */
2474 static const char * const funcnames[] = {
2475 /* This array must be kept sorted. */
2476 "strsep",
2477 };
2478 const size_t count = ARRAY_SIZE (funcnames);
2479 function_set fs (funcnames, count);
2480
2481 if (fs.contains_decl_p (fndecl))
2482 return true;
2483
2484 return false;
2485 }
2486
2487 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2488 assign zero to LHS. */
2489
2490 void
2491 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2492 const gimple *stmt,
2493 tree lhs) const
2494 {
2495 state_t s = sm_ctxt->get_state (stmt, lhs);
2496 enum resource_state rs = get_rs (s);
2497 if (rs == RS_START
2498 || rs == RS_UNCHECKED
2499 || rs == RS_NONNULL
2500 || rs == RS_FREED)
2501 sm_ctxt->set_next_state (stmt, lhs, m_null);
2502 }
2503
2504 /* Special-case hook for handling realloc, for the "success with move to
2505 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2506 non-null.
2507
2508 This is similar to on_deallocator_call and on_allocator_call,
2509 but the checks happen in on_realloc_call, and by splitting the states. */
2510
2511 void
2512 malloc_state_machine::
2513 on_realloc_with_move (region_model *model,
2514 sm_state_map *smap,
2515 const svalue *old_ptr_sval,
2516 const svalue *new_ptr_sval,
2517 const extrinsic_state &ext_state) const
2518 {
2519 smap->set_state (model, old_ptr_sval,
2520 m_free.m_deallocator.m_freed,
2521 NULL, ext_state);
2522
2523 smap->set_state (model, new_ptr_sval,
2524 m_free.m_nonnull,
2525 NULL, ext_state);
2526 }
2527
/* Hook for get_or_create_region_for_heap_alloc for the case when we want
   ptr_sval to mark a newly created region as assumed non null on malloc SM.
   Sets NEW_PTR_SVAL's sm-state in SMAP to the "nonnull" state of the
   "free" deallocator-set.  */
void
malloc_state_machine::transition_ptr_sval_non_null (region_model *model,
						    sm_state_map *smap,
						    const svalue *new_ptr_sval,
						    const extrinsic_state &ext_state) const
{
  smap->set_state (model, new_ptr_sval, m_free.m_nonnull, NULL, ext_state);
}
2538
2539 } // anonymous namespace
2540
/* Internal interface to this file.  */

/* Factory function: create a new malloc_state_machine, logging to LOGGER.
   Caller takes ownership of the returned state machine.  */
state_machine *
make_malloc_state_machine (logger *logger)
{
  return new malloc_state_machine (logger);
}
2548
2549 /* Specialcase hook for handling realloc, for use by
2550 kf_realloc::impl_call_post::success_with_move::update_model. */
2551
2552 void
2553 region_model::on_realloc_with_move (const call_details &cd,
2554 const svalue *old_ptr_sval,
2555 const svalue *new_ptr_sval)
2556 {
2557 region_model_context *ctxt = cd.get_ctxt ();
2558 if (!ctxt)
2559 return;
2560 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2561 if (!ext_state)
2562 return;
2563
2564 sm_state_map *smap;
2565 const state_machine *sm;
2566 unsigned sm_idx;
2567 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2568 return;
2569
2570 gcc_assert (smap);
2571 gcc_assert (sm);
2572
2573 const malloc_state_machine &malloc_sm
2574 = (const malloc_state_machine &)*sm;
2575
2576 malloc_sm.on_realloc_with_move (this,
2577 smap,
2578 old_ptr_sval,
2579 new_ptr_sval,
2580 *ext_state);
2581 }
2582
2583 /* Moves ptr_sval from start to assumed non-null, for use by
2584 region_model::get_or_create_region_for_heap_alloc. */
2585 void
2586 region_model::transition_ptr_sval_non_null (region_model_context *ctxt,
2587 const svalue *ptr_sval)
2588 {
2589 if (!ctxt)
2590 return;
2591 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2592 if (!ext_state)
2593 return;
2594
2595 sm_state_map *smap;
2596 const state_machine *sm;
2597 unsigned sm_idx;
2598 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2599 return;
2600
2601 gcc_assert (smap);
2602 gcc_assert (sm);
2603
2604 const malloc_state_machine &malloc_sm = (const malloc_state_machine &)*sm;
2605
2606 malloc_sm.transition_ptr_sval_non_null (this, smap, ptr_sval, *ext_state);
2607 }
2608
2609 } // namespace ana
2610
2611 #endif /* #if ENABLE_ANALYZER */