]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/analyzer/sm-malloc.cc
analyzer: fix false +ves from -Wanalyzer-deref-before-check due to inlining [PR109239]
[thirdparty/gcc.git] / gcc / analyzer / sm-malloc.cc
1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "options.h"
31 #include "bitmap.h"
32 #include "diagnostic-path.h"
33 #include "diagnostic-metadata.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "analyzer/call-string.h"
40 #include "analyzer/program-point.h"
41 #include "analyzer/store.h"
42 #include "analyzer/region-model.h"
43 #include "analyzer/call-details.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "analyzer/function-set.h"
47 #include "analyzer/program-state.h"
48 #include "analyzer/checker-event.h"
49 #include "analyzer/exploded-graph.h"
50
51 #if ENABLE_ANALYZER
52
53 namespace ana {
54
55 namespace {
56
57 /* This state machine and its various support classes track allocations
58 and deallocations.
59
60 It has a few standard allocation/deallocation pairs (e.g. new/delete),
61 and also supports user-defined ones via
62 __attribute__ ((malloc(DEALLOCATOR))).
63
64 There can be more than one valid deallocator for a given allocator,
65 for example:
66 __attribute__ ((malloc (fclose)))
67 __attribute__ ((malloc (freopen, 3)))
68 FILE* fopen (const char*, const char*);
69 A deallocator_set represents a particular set of valid deallocators.
70
71 We track the expected deallocator_set for a value, but not the allocation
72 function - there could be more than one allocator per deallocator_set.
73 For example, there could be dozens of allocators for "free" beyond just
74 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
75 of states by tracking individual allocators in the exploded graph;
76 we merely want to track "this value expects to have 'free' called on it".
77 Perhaps we can reconstruct which allocator was used later, when emitting
78 the path, if it's necessary for precision of wording of diagnostics. */
79
80 class deallocator;
81 class deallocator_set;
82 class malloc_state_machine;
83
84 /* An enum for discriminating between different kinds of allocation_state. */
85
/* An enum for discriminating between different kinds of allocation_state.
   The first group of states is shared by all allocators; the second group
   is instantiated once per deallocator_set.  */

enum resource_state
{
  /* States that are independent of allocator/deallocator.  */

  /* The start state.  */
  RS_START,

  /* State for a pointer that's been unconditionally dereferenced.  */
  RS_ASSUMED_NON_NULL,

  /* State for a pointer that's known to be NULL.  */
  RS_NULL,

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  RS_NON_HEAP,

  /* Stop state, for pointers we don't want to track any more.  */
  RS_STOP,

  /* States that relate to a specific deallocator_set.  */

  /* State for a pointer returned from an allocator that hasn't
     been checked for NULL.
     It could be a pointer to heap-allocated memory, or could be NULL.  */
  RS_UNCHECKED,

  /* State for a pointer returned from an allocator,
     known to be non-NULL.  */
  RS_NONNULL,

  /* State for a pointer passed to a deallocator.  */
  RS_FREED
};
120
121 /* Custom state subclass, which can optionally refer to an a
122 deallocator_set. */
123
/* Custom state subclass, which can optionally refer to a
   deallocator_set (for RS_UNCHECKED/RS_NONNULL) and/or a specific
   deallocator (for RS_FREED).  */

struct allocation_state : public state_machine::state
{
  allocation_state (const char *name, unsigned id,
		    enum resource_state rs,
		    const deallocator_set *deallocators,
		    const deallocator *deallocator)
  : state (name, id), m_rs (rs),
    m_deallocators (deallocators),
    m_deallocator (deallocator)
  {}

  void dump_to_pp (pretty_printer *pp) const override;

  /* Get the "nonnull" state for the same deallocator_set as this state.  */
  const allocation_state *get_nonnull () const;

  /* Which kind of state this is.  */
  enum resource_state m_rs;

  /* The set of valid deallocators, or NULL for api-independent states.  */
  const deallocator_set *m_deallocators;

  /* The specific deallocator that was used, or NULL if not applicable.  */
  const deallocator *m_deallocator;
};
143
144 /* Custom state subclass, for the "assumed-non-null" state
145 where the assumption happens in a particular frame. */
146
/* Custom state subclass, for the "assumed-non-null" state
   where the assumption happens in a particular frame.  */

struct assumed_non_null_state : public allocation_state
{
  assumed_non_null_state (const char *name, unsigned id,
			  const frame_region *frame)
  : allocation_state (name, id, RS_ASSUMED_NON_NULL,
		      NULL, NULL),
    m_frame (frame)
  {
    /* The frame must be non-NULL; the assumption is per-frame.  */
    gcc_assert (m_frame);
  }

  void dump_to_pp (pretty_printer *pp) const final override;

  /* The stack frame in which the dereference (and hence the assumption)
     occurred.  */
  const frame_region *m_frame;
};
162
163 /* An enum for choosing which wording to use in various diagnostics
164 when describing deallocations. */
165
/* An enum for choosing which wording to use in various diagnostics
   when describing deallocations (e.g. "freed" for free,
   "deleted" for operator delete).  */

enum wording
{
  WORDING_FREED,
  WORDING_DELETED,
  WORDING_DEALLOCATED,
  WORDING_REALLOCATED
};
173
174 /* Base class representing a deallocation function,
175 either a built-in one we know about, or one exposed via
176 __attribute__((malloc(DEALLOCATOR))). */
177
/* Base class representing a deallocation function,
   either a built-in one we know about, or one exposed via
   __attribute__((malloc(DEALLOCATOR))).  */

struct deallocator
{
  hashval_t hash () const;
  void dump_to_pp (pretty_printer *pp) const;
  /* qsort-style comparators, for stable orderings of deallocators.  */
  static int cmp (const deallocator *a, const deallocator *b);
  static int cmp_ptr_ptr (const void *, const void *);

  /* Name to use in diagnostics.  */
  const char *m_name;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* State for a value passed to one of the deallocators.  */
  state_machine::state_t m_freed;

protected:
  /* Only constructible via subclasses; registers a "freed" state
     for this deallocator with SM.  */
  deallocator (malloc_state_machine *sm,
	       const char *name,
	       enum wording wording);
};
199
200 /* Subclass representing a predefined deallocator.
201 e.g. "delete []", without needing a specific FUNCTION_DECL
202 ahead of time. */
203
/* Subclass representing a predefined deallocator.
   e.g. "delete []", without needing a specific FUNCTION_DECL
   ahead of time.  */

struct standard_deallocator : public deallocator
{
  standard_deallocator (malloc_state_machine *sm,
			const char *name,
			enum wording wording);
};
210
211 /* Subclass representing a user-defined deallocator
212 via __attribute__((malloc(DEALLOCATOR))) given
213 a specific FUNCTION_DECL. */
214
/* Subclass representing a user-defined deallocator
   via __attribute__((malloc(DEALLOCATOR))) given
   a specific FUNCTION_DECL.  */

struct custom_deallocator : public deallocator
{
  custom_deallocator (malloc_state_machine *sm,
		      tree deallocator_fndecl,
		      enum wording wording)
  /* Use the fndecl's identifier as the name shown in diagnostics.  */
  : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
		 wording)
  {
  }
};
225
226 /* Base class representing a set of possible deallocators.
227 Often this will be just a single deallocator, but some
228 allocators have multiple valid deallocators (e.g. the result of
229 "fopen" can be closed by either "fclose" or "freopen"). */
230
/* Base class representing a set of possible deallocators.
   Often this will be just a single deallocator, but some
   allocators have multiple valid deallocators (e.g. the result of
   "fopen" can be closed by either "fclose" or "freopen").  */

struct deallocator_set
{
  deallocator_set (malloc_state_machine *sm,
		   enum wording wording);
  virtual ~deallocator_set () {}

  /* Return true if D is a valid deallocator for this set.  */
  virtual bool contains_p (const deallocator *d) const = 0;
  /* Return the sole member if the set is a singleton, otherwise NULL.  */
  virtual const deallocator *maybe_get_single () const = 0;
  virtual void dump_to_pp (pretty_printer *pp) const = 0;
  void dump () const;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* Pointers to states.
     These states are owned by the state_machine base class.  */

  /* State for an unchecked result from an allocator using this set.  */
  state_machine::state_t m_unchecked;

  /* State for a known non-NULL result from such an allocator.  */
  state_machine::state_t m_nonnull;
};
254
255 /* Subclass of deallocator_set representing a set of deallocators
256 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
257
/* Subclass of deallocator_set representing a set of deallocators
   defined by one or more __attribute__((malloc(DEALLOCATOR))).  */

struct custom_deallocator_set : public deallocator_set
{
  /* Key type used when consolidating sets: the vector of members.  */
  typedef const auto_vec <const deallocator *> *key_t;

  custom_deallocator_set (malloc_state_machine *sm,
			  const auto_vec <const deallocator *> *vec,
			  //const char *name,
			  //const char *dealloc_funcname,
			  //unsigned arg_idx,
			  enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  /* The member deallocators (copied from the ctor's VEC).  */
  auto_vec <const deallocator *> m_deallocator_vec;
};
275
276 /* Subclass of deallocator_set representing a set of deallocators
277 with a single standard_deallocator, e.g. "delete []". */
278
/* Subclass of deallocator_set representing a set of deallocators
   with a single standard_deallocator, e.g. "delete []".  */

struct standard_deallocator_set : public deallocator_set
{
  standard_deallocator_set (malloc_state_machine *sm,
			    const char *name,
			    enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  /* The single member of this set, stored by value.  */
  standard_deallocator m_deallocator;
};
291
292 /* Traits class for ensuring uniqueness of deallocator_sets within
293 malloc_state_machine. */
294
/* Traits class for ensuring uniqueness of deallocator_sets within
   malloc_state_machine.
   Keys are vectors of deallocators; NULL is the "empty" marker and
   the value 1 (cast to a pointer) is the "deleted" marker, so neither
   may be used as a real key.  */

struct deallocator_set_map_traits
{
  typedef custom_deallocator_set::key_t key_type;
  typedef custom_deallocator_set *value_type;
  typedef custom_deallocator_set *compare_type;

  static inline hashval_t hash (const key_type &k)
  {
    /* Reject the reserved empty/deleted markers.  */
    gcc_assert (k != NULL);
    gcc_assert (k != reinterpret_cast<key_type> (1));

    /* XOR the hashes of the members; order-independent.  */
    hashval_t result = 0;
    unsigned i;
    const deallocator *d;
    FOR_EACH_VEC_ELT (*k, i, d)
      result ^= d->hash ();
    return result;
  }
  static inline bool equal_keys (const key_type &k1, const key_type &k2)
  {
    /* Element-wise pointer equality; deallocators are consolidated,
       so pointer identity suffices.  */
    if (k1->length () != k2->length ())
      return false;

    for (unsigned i = 0; i < k1->length (); i++)
      if ((*k1)[i] != (*k2)[i])
	return false;

    return true;
  }
  template <typename T>
  static inline void remove (T &)
  {
    /* empty; the nodes are handled elsewhere.  */
  }
  template <typename T>
  static inline void mark_deleted (T &entry)
  {
    entry.m_key = reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline void mark_empty (T &entry)
  {
    entry.m_key = NULL;
  }
  template <typename T>
  static inline bool is_deleted (const T &entry)
  {
    return entry.m_key == reinterpret_cast<key_type> (1);
  }
  template <typename T>
  static inline bool is_empty (const T &entry)
  {
    return entry.m_key == NULL;
  }
  static const bool empty_zero_p = false;
};
351
352 /* A state machine for detecting misuses of the malloc/free API.
353
354 See sm-malloc.dot for an overview (keep this in-sync with that file). */
355
class malloc_state_machine : public state_machine
{
public:
  typedef allocation_state custom_data_t;

  malloc_state_machine (logger *logger);
  ~malloc_state_machine ();

  /* Add a new allocation_state with the given name, resource_state,
     and deallocator information (defined later in this file).  */
  state_t
  add_state (const char *name, enum resource_state rs,
	     const deallocator_set *deallocators,
	     const deallocator *deallocator);

  bool inherited_state_p () const final override { return false; }

  /* Compute the implicit state of SVAL when no explicit state has been
     recorded: the constant zero pointer is "null"; pointers into
     code, globals, the stack, or read-only data are "non-heap";
     everything else gets the start state.  */
  state_machine::state_t
  get_default_state (const svalue *sval) const final override
  {
    if (tree cst = sval->maybe_get_constant ())
      {
	if (zerop (cst))
	  return m_null;
      }
    if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
      {
	const region *reg = ptr->get_pointee ();
	switch (reg->get_memory_space ())
	  {
	  default:
	    break;
	  case MEMSPACE_CODE:
	  case MEMSPACE_GLOBALS:
	  case MEMSPACE_STACK:
	  case MEMSPACE_READONLY_DATA:
	    return m_non_heap;
	  }
      }
    return m_start;
  }

  bool on_stmt (sm_context *sm_ctxt,
		const supernode *node,
		const gimple *stmt) const final override;

  void on_phi (sm_context *sm_ctxt,
	       const supernode *node,
	       const gphi *phi,
	       tree rhs) const final override;

  void on_condition (sm_context *sm_ctxt,
		     const supernode *node,
		     const gimple *stmt,
		     const svalue *lhs,
		     enum tree_code op,
		     const svalue *rhs) const final override;

  void on_pop_frame (sm_state_map *smap,
		     const frame_region *) const final override;

  bool can_purge_p (state_t s) const final override;
  std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;

  bool reset_when_passed_to_unknown_fn_p (state_t s,
					  bool is_mutable) const final override;

  state_t
  maybe_get_merged_states_nonequal (state_t state_a,
				    state_t state_b) const final override;

  static bool unaffected_by_call_p (tree fndecl);

  /* Possibly transition PTR to an assumed-non-null state at STMT
     (defined later in this file).  */
  void maybe_assume_non_null (sm_context *sm_ctxt,
			      tree ptr,
			      const gimple *stmt) const;

  void on_realloc_with_move (region_model *model,
			     sm_state_map *smap,
			     const svalue *old_ptr_sval,
			     const svalue *new_ptr_sval,
			     const extrinsic_state &ext_state) const;

  /* The built-in deallocator sets.  */
  standard_deallocator_set m_free;
  standard_deallocator_set m_scalar_delete;
  standard_deallocator_set m_vector_delete;

  standard_deallocator m_realloc;

  /* States that are independent of api.  */

  /* States for a pointer that's been unconditionally dereferenced
     in a particular stack frame.  */
  hash_map<const frame_region *, state_t> m_assumed_non_null;

  /* State for a pointer that's known to be NULL.  */
  state_t m_null;

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  state_t m_non_heap; // TODO: or should this be a different state machine?
		      // or do we need child values etc?

  /* Stop state, for pointers we don't want to track any more.  */
  state_t m_stop;

private:
  const custom_deallocator_set *
  get_or_create_custom_deallocator_set (tree allocator_fndecl);
  custom_deallocator_set *
  maybe_create_custom_deallocator_set (tree allocator_fndecl);
  const deallocator *
  get_or_create_deallocator (tree deallocator_fndecl);

  state_t
  get_or_create_assumed_non_null_state_for_frame (const frame_region *frame);

  void
  maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
					   const supernode *node,
					   const gimple *stmt,
					   const assumed_non_null_state *,
					   tree ptr) const;

  void on_allocator_call (sm_context *sm_ctxt,
			  const gcall *call,
			  const deallocator_set *deallocators,
			  bool returns_nonnull = false) const;
  void handle_free_of_non_heap (sm_context *sm_ctxt,
				const supernode *node,
				const gcall *call,
				tree arg,
				const deallocator *d) const;
  void on_deallocator_call (sm_context *sm_ctxt,
			    const supernode *node,
			    const gcall *call,
			    const deallocator *d,
			    unsigned argno) const;
  void on_realloc_call (sm_context *sm_ctxt,
			const supernode *node,
			const gcall *call) const;
  void on_zero_assignment (sm_context *sm_ctxt,
			   const gimple *stmt,
			   tree lhs) const;

  /* A map for consolidating deallocators so that they are
     unique per deallocator FUNCTION_DECL.  */
  typedef hash_map<tree, deallocator *> deallocator_map_t;
  deallocator_map_t m_deallocator_map;

  /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *.  */
  typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
  deallocator_set_cache_t m_custom_deallocator_set_cache;

  /* A map for consolidating custom_deallocator_set instances.  */
  typedef hash_map<custom_deallocator_set::key_t,
		   custom_deallocator_set *,
		   deallocator_set_map_traits> custom_deallocator_set_map_t;
  custom_deallocator_set_map_t m_custom_deallocator_set_map;

  /* Record of dynamically-allocated objects, for cleanup.  */
  auto_vec <custom_deallocator_set *> m_dynamic_sets;
  auto_vec <custom_deallocator *> m_dynamic_deallocators;
};
518
519 /* struct deallocator. */
520
deallocator::deallocator (malloc_state_machine *sm,
			  const char *name,
			  enum wording wording)
: m_name (name),
  m_wording (wording),
  /* Register a per-deallocator "freed" state with SM, recording this
     deallocator in the state.  */
  m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
{
}
529
530 hashval_t
531 deallocator::hash () const
532 {
533 return (hashval_t)m_freed->get_id ();
534 }
535
/* Print the quoted name of this deallocator to PP.  */

void
deallocator::dump_to_pp (pretty_printer *pp) const
{
  pp_printf (pp, "%qs", m_name);
}
541
542 int
543 deallocator::cmp (const deallocator *a, const deallocator *b)
544 {
545 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
546 }
547
548 int
549 deallocator::cmp_ptr_ptr (const void *a, const void *b)
550 {
551 return cmp (*(const deallocator * const *)a,
552 *(const deallocator * const *)b);
553 }
554
555
556 /* struct standard_deallocator : public deallocator. */
557
/* struct standard_deallocator : public deallocator.  */

standard_deallocator::standard_deallocator (malloc_state_machine *sm,
					    const char *name,
					    enum wording wording)
: deallocator (sm, name, wording)
{
}
564
565 /* struct deallocator_set. */
566
deallocator_set::deallocator_set (malloc_state_machine *sm,
				  enum wording wording)
: m_wording (wording),
  /* Register the per-set "unchecked" and "nonnull" states with SM,
     recording this set in each state.  */
  m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
  m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
{
}
574
575 /* Dump a description of this deallocator_set to stderr. */
576
/* Dump a description of this deallocator_set to stderr.  */

DEBUG_FUNCTION void
deallocator_set::dump () const
{
  pretty_printer pp;
  /* Match the colorization of regular diagnostic output.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp);
  pp_newline (&pp);
  pp_flush (&pp);
}
587
588 /* struct custom_deallocator_set : public deallocator_set. */
589
590 custom_deallocator_set::
591 custom_deallocator_set (malloc_state_machine *sm,
592 const auto_vec <const deallocator *> *vec,
593 enum wording wording)
594 : deallocator_set (sm, wording),
595 m_deallocator_vec (vec->length ())
596 {
597 unsigned i;
598 const deallocator *d;
599 FOR_EACH_VEC_ELT (*vec, i, d)
600 m_deallocator_vec.safe_push (d);
601 }
602
603 bool
604 custom_deallocator_set::contains_p (const deallocator *d) const
605 {
606 unsigned i;
607 const deallocator *cd;
608 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
609 if (cd == d)
610 return true;
611 return false;
612 }
613
614 const deallocator *
615 custom_deallocator_set::maybe_get_single () const
616 {
617 if (m_deallocator_vec.length () == 1)
618 return m_deallocator_vec[0];
619 return NULL;
620 }
621
622 void
623 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
624 {
625 pp_character (pp, '{');
626 unsigned i;
627 const deallocator *d;
628 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
629 {
630 if (i > 0)
631 pp_string (pp, ", ");
632 d->dump_to_pp (pp);
633 }
634 pp_character (pp, '}');
635 }
636
637 /* struct standard_deallocator_set : public deallocator_set. */
638
/* struct standard_deallocator_set : public deallocator_set.  */

standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
						    const char *name,
						    enum wording wording)
: deallocator_set (sm, wording),
  /* Construct the single member in-place.  */
  m_deallocator (sm, name, wording)
{
}
646
647 bool
648 standard_deallocator_set::contains_p (const deallocator *d) const
649 {
650 return d == &m_deallocator;
651 }
652
/* Always a singleton set, so unconditionally return the sole member.  */

const deallocator *
standard_deallocator_set::maybe_get_single () const
{
  return &m_deallocator;
}
658
659 void
660 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
661 {
662 pp_character (pp, '{');
663 pp_string (pp, m_deallocator.m_name);
664 pp_character (pp, '}');
665 }
666
667 /* Return STATE cast to the custom state subclass, or NULL for the start state.
668 Everything should be an allocation_state apart from the start state. */
669
670 static const allocation_state *
671 dyn_cast_allocation_state (state_machine::state_t state)
672 {
673 if (state->get_id () == 0)
674 return NULL;
675 return static_cast <const allocation_state *> (state);
676 }
677
678 /* Return STATE cast to the custom state subclass, for a state that is
679 already known to not be the start state . */
680
static const allocation_state *
as_a_allocation_state (state_machine::state_t state)
{
  /* The start state (id 0) is the only non-allocation_state, so
     reject it here; callers must have ruled it out.  */
  gcc_assert (state->get_id () != 0);
  return static_cast <const allocation_state *> (state);
}
687
688 /* Get the resource_state for STATE. */
689
690 static enum resource_state
691 get_rs (state_machine::state_t state)
692 {
693 if (const allocation_state *astate = dyn_cast_allocation_state (state))
694 return astate->m_rs;
695 else
696 return RS_START;
697 }
698
699 /* Return true if STATE is the start state. */
700
701 static bool
702 start_p (state_machine::state_t state)
703 {
704 return get_rs (state) == RS_START;
705 }
706
707 /* Return true if STATE is an unchecked result from an allocator. */
708
709 static bool
710 unchecked_p (state_machine::state_t state)
711 {
712 return get_rs (state) == RS_UNCHECKED;
713 }
714
715 /* Return true if STATE is a non-null result from an allocator. */
716
717 static bool
718 nonnull_p (state_machine::state_t state)
719 {
720 return get_rs (state) == RS_NONNULL;
721 }
722
723 /* Return true if STATE is a value that has been passed to a deallocator. */
724
725 static bool
726 freed_p (state_machine::state_t state)
727 {
728 return get_rs (state) == RS_FREED;
729 }
730
731 /* Return true if STATE is a value that has been assumed to be non-NULL. */
732
733 static bool
734 assumed_non_null_p (state_machine::state_t state)
735 {
736 return get_rs (state) == RS_ASSUMED_NON_NULL;
737 }
738
739 /* Class for diagnostics relating to malloc_state_machine. */
740
/* Class for diagnostics relating to malloc_state_machine.  */

class malloc_diagnostic : public pending_diagnostic
{
public:
  malloc_diagnostic (const malloc_state_machine &sm, tree arg)
  : m_sm (sm), m_arg (arg)
  {}

  /* Two such diagnostics are equal if they concern the same tree.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
  }

  /* Describe state transitions common to the malloc-related
     diagnostics: allocation, and NULL/non-NULL assumptions.
     Return an empty label for transitions we don't describe.  */
  label_text describe_state_change (const evdesc::state_change &change)
    override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      // TODO: verify that it's the allocation stmt, not a copy
      return label_text::borrow ("allocated here");
    if (unchecked_p (change.m_old_state)
	&& nonnull_p (change.m_new_state))
      {
	if (change.m_expr)
	  return change.formatted_print ("assuming %qE is non-NULL",
					 change.m_expr);
	else
	  return change.formatted_print ("assuming %qs is non-NULL",
					 "<unknown>");
      }
    if (change.m_new_state == m_sm.m_null)
      {
	/* Use "assuming" wording when coming from "unchecked" (a new
	   assumption), plain wording otherwise (a known fact).  */
	if (unchecked_p (change.m_old_state))
	  {
	    if (change.m_expr)
	      return change.formatted_print ("assuming %qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("assuming %qs is NULL",
					     "<unknown>");
	  }
	else
	  {
	    if (change.m_expr)
	      return change.formatted_print ("%qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("%qs is NULL",
					     "<unknown>");
	  }
      }

    return label_text ();
  }

  /* Map state changes to event meanings: entering "unchecked" acquires
     memory; entering "freed" releases it.  */
  diagnostic_event::meaning
  get_meaning_for_state_change (const evdesc::state_change &change)
    const final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
					diagnostic_event::NOUN_memory);
    if (freed_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_release,
					diagnostic_event::NOUN_memory);
    return diagnostic_event::meaning ();
  }

protected:
  const malloc_state_machine &m_sm;
  /* The tree (e.g. a pointer variable) the diagnostic concerns.  */
  tree m_arg;
};
813
814 /* Concrete subclass for reporting mismatching allocator/deallocator
815 diagnostics. */
816
/* Concrete subclass for reporting mismatching allocator/deallocator
   diagnostics.  */

class mismatching_deallocation : public malloc_diagnostic
{
public:
  mismatching_deallocation (const malloc_state_machine &sm, tree arg,
			    const deallocator_set *expected_deallocators,
			    const deallocator *actual_dealloc)
  : malloc_diagnostic (sm, arg),
    m_expected_deallocators (expected_deallocators),
    m_actual_dealloc (actual_dealloc)
  {}

  const char *get_kind () const final override
  {
    return "mismatching_deallocation";
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_mismatching_deallocation;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines.  */
    /* Name the expected deallocator if the set is a singleton;
       otherwise fall back to naming only the one that was used.  */
    if (const deallocator *expected_dealloc
	  = m_expected_deallocators->maybe_get_single ())
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "%qE should have been deallocated with %qs"
			   " but was deallocated with %qs",
			   m_arg, expected_dealloc->m_name,
			   m_actual_dealloc->m_name);
    else
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "%qs called on %qE returned from a mismatched"
			   " allocation function",
			   m_actual_dealloc->m_name, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state))
      {
	/* Remember the allocation event so describe_final_event can
	   refer back to it with %@.  */
	m_alloc_event = change.m_event_id;
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return change.formatted_print ("allocated here"
					 " (expects deallocation with %qs)",
					 expected_dealloc->m_name);
	else
	  return change.formatted_print ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_alloc_event.known_p ())
      {
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocation at %@ expects deallocation with %qs",
	     m_actual_dealloc->m_name, &m_alloc_event,
	     expected_dealloc->m_name);
	else
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocated at %@",
	     m_actual_dealloc->m_name, &m_alloc_event);
      }
    return ev.formatted_print ("deallocated with %qs here",
			       m_actual_dealloc->m_name);
  }

private:
  /* The event at which the value was allocated, if known.  */
  diagnostic_event_id_t m_alloc_event;
  /* The set of deallocators that would have been valid.  */
  const deallocator_set *m_expected_deallocators;
  /* The deallocator that was actually used.  */
  const deallocator *m_actual_dealloc;
};
900
901 /* Concrete subclass for reporting double-free diagnostics. */
902
/* Concrete subclass for reporting double-free diagnostics.  */

class double_free : public malloc_diagnostic
{
public:
  double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
  : malloc_diagnostic (sm, arg), m_funcname (funcname)
  {}

  const char *get_kind () const final override { return "double_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_double_free;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (415); /* CWE-415: Double Free.  */
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "double-%qs of %qE", m_funcname, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	/* Remember the first free so the final event can refer to it.  */
	m_first_free_event = change.m_event_id;
	return change.formatted_print ("first %qs here", m_funcname);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_call_with_state (const evdesc::call_with_state &info)
    final override
  {
    if (freed_p (info.m_state))
      return info.formatted_print
	("passing freed pointer %qE in call to %qE from %qE",
	 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_first_free_event.known_p ())
      return ev.formatted_print ("second %qs here; first %qs was at %@",
				 m_funcname, m_funcname,
				 &m_first_free_event);
    return ev.formatted_print ("second %qs here", m_funcname);
  }

private:
  /* The event of the first deallocation, if known.  */
  diagnostic_event_id_t m_first_free_event;
  /* The name of the deallocation function, for wording (e.g. "free").  */
  const char *m_funcname;
};
960
961 /* Abstract subclass for describing possible bad uses of NULL.
962 Responsible for describing the call that could return NULL. */
963
/* Abstract subclass for describing possible bad uses of NULL.
   Responsible for describing the call that could return NULL.  */

class possible_null : public malloc_diagnostic
{
public:
  possible_null (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg)
  {}

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      {
	/* Remember the event at which the unchecked value was acquired,
	   so subclasses can refer back to it in their final event.  */
	m_origin_of_unchecked_event = change.m_event_id;
	return label_text::borrow ("this call could return NULL");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (unchecked_p (info.m_state))
      return info.formatted_print ("possible return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

protected:
  /* The event at which the unchecked value originated, if known.  */
  diagnostic_event_id_t m_origin_of_unchecked_event;
};
995
996 /* Concrete subclass for describing dereference of a possible NULL
997 value. */
998
/* Concrete subclass for describing dereference of a possible NULL
   value.  */

class possible_null_deref : public possible_null
{
public:
  possible_null_deref (const malloc_state_machine &sm, tree arg)
  : possible_null (sm, arg)
  {}

  const char *get_kind () const final override { return "possible_null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_dereference;
  }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    diagnostic_metadata m;
    m.add_cwe (690);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "dereference of possibly-NULL %qE", m_arg);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    /* Refer back to the originating call if possible_null recorded it.  */
    if (m_origin_of_unchecked_event.known_p ())
      return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
				 ev.m_expr,
				 &m_origin_of_unchecked_event);
    else
      return ev.formatted_print ("%qE could be NULL", ev.m_expr);
  }

};
1033
1034 /* Return true if FNDECL is a C++ method. */
1035
1036 static bool
1037 method_p (tree fndecl)
1038 {
1039 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
1040 }
1041
1042 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
1043 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
1044 as called from cp_printer). */
1045
1046 static label_text
1047 describe_argument_index (tree fndecl, int arg_idx)
1048 {
1049 if (method_p (fndecl))
1050 if (arg_idx == 0)
1051 return label_text::borrow ("'this'");
1052 pretty_printer pp;
1053 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
1054 return label_text::take (xstrdup (pp_formatted_text (&pp)));
1055 }
1056
1057 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
1058 Issue a note informing that the pertinent argument must be non-NULL. */
1059
/* Subroutine for use by possible_null_arg::emit and null_arg::emit.
   Issue a note informing that the pertinent argument must be non-NULL,
   located at FNDECL's declaration.  */

static void
inform_nonnull_attribute (tree fndecl, int arg_idx)
{
  label_text arg_desc = describe_argument_index (fndecl, arg_idx);
  inform (DECL_SOURCE_LOCATION (fndecl),
	  "argument %s of %qD must be non-null",
	  arg_desc.get (), fndecl);
  /* Ideally we would use the location of the parm and underline the
     attribute also - but we don't have the location_t values at this point
     in the middle-end.
     For reference, the C and C++ FEs have get_fndecl_argument_location.  */
}
1072
/* Concrete subclass for describing passing a possibly-NULL value to a
   function marked with __attribute__((nonnull)).  */

class possible_null_arg : public possible_null
{
public:
  possible_null_arg (const malloc_state_machine &sm, tree arg,
		     tree fndecl, int arg_idx)
  : possible_null (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "possible_null_arg"; }

  /* Two instances are equal iff they describe the same argument of the
     same callee.  */
  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const possible_null_arg &sub_other
      = (const possible_null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_argument;
  }

  /* Emit the warning, with a follow-up note pointing at the nonnull
     requirement on the callee's declaration.  */
  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (690);
    bool warned
      = warning_meta (rich_loc, m, get_controlling_option (),
		      "use of possibly-NULL %qE where non-null expected",
		      m_arg);
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  /* Mention the event where the unchecked value originated, if known.  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    if (m_origin_of_unchecked_event.known_p ())
      result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr,
				   &m_origin_of_unchecked_event);
    else
      result = ev.formatted_print ("argument %s (%qE) could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;	/* The callee carrying the nonnull attribute.  */
  int m_arg_idx;	/* 0-based index of the offending argument.  */
};
1137
/* Concrete subclass for describing a dereference of a NULL value.  */

class null_deref : public malloc_diagnostic
{
public:
  null_deref (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_dereference;
  }

  /* Stop exploring the path once this has been reported.  */
  bool terminate_path_p () const final override { return true; }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    diagnostic_metadata m;
    m.add_cwe (476);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "dereference of NULL %qE", m_arg);
  }

  /* Describe an interprocedural event in which NULL was returned from
     a callee to its caller.  */
  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (info.m_state == m_sm.m_null)
      return info.formatted_print ("return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
  }
};
1178
/* Concrete subclass for describing passing a NULL value to a
   function marked with __attribute__((nonnull)).  */

class null_arg : public malloc_diagnostic
{
public:
  null_arg (const malloc_state_machine &sm, tree arg,
	    tree fndecl, int arg_idx)
  : malloc_diagnostic (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "null_arg"; }

  /* Two instances are equal iff they describe the same argument of the
     same callee.  */
  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const null_arg &sub_other
      = (const null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_argument;
  }

  /* Stop exploring the path once this has been reported.  */
  bool terminate_path_p () const final override { return true; }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (476);

    /* Don't print the expression when it's a literal zero; the wording
       without it reads better.  */
    bool warned;
    if (zerop (m_arg))
      warned = warning_meta (rich_loc, m, get_controlling_option (),
			     "use of NULL where non-null expected");
    else
      warned = warning_meta (rich_loc, m, get_controlling_option (),
			     "use of NULL %qE where non-null expected",
			     m_arg);
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    if (zerop (ev.m_expr))
      result = ev.formatted_print ("argument %s NULL where non-null expected",
				   arg_desc.get ());
    else
      result = ev.formatted_print ("argument %s (%qE) NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;	/* The callee carrying the nonnull attribute.  */
  int m_arg_idx;	/* 0-based index of the offending argument.  */
};
1248
/* Concrete subclass for describing a use of a pointer after it has been
   deallocated (by "free", "delete", etc., per M_DEALLOCATOR).  */

class use_after_free : public malloc_diagnostic
{
public:
  use_after_free (const malloc_state_machine &sm, tree arg,
		  const deallocator *deallocator)
  : malloc_diagnostic (sm, arg),
    m_deallocator (deallocator)
  {
    gcc_assert (deallocator);
  }

  const char *get_kind () const final override { return "use_after_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_use_after_free;
  }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-416: Use After Free.  */
    diagnostic_metadata m;
    m.add_cwe (416);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "use after %<%s%> of %qE",
			 m_deallocator->m_name, m_arg);
  }

  /* Label the event where the deallocation happened, wording it to match
     the kind of deallocator, and remember its event id so that
     describe_final_event can refer back to it.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	m_free_event = change.m_event_id;
	switch (m_deallocator->m_wording)
	  {
	  default:
	  case WORDING_REALLOCATED:
	    gcc_unreachable ();
	  case WORDING_FREED:
	    return label_text::borrow ("freed here");
	  case WORDING_DELETED:
	    return label_text::borrow ("deleted here");
	  case WORDING_DEALLOCATED:
	    return label_text::borrow ("deallocated here");
	  }
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *funcname = m_deallocator->m_name;
    if (m_free_event.known_p ())
      switch (m_deallocator->m_wording)
	{
	default:
	case WORDING_REALLOCATED:
	  gcc_unreachable ();
	case WORDING_FREED:
	  return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DELETED:
	  return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DEALLOCATED:
	  return ev.formatted_print ("use after %<%s%> of %qE;"
				     " deallocated at %@",
				     funcname, ev.m_expr, &m_free_event);
	}
    else
      return ev.formatted_print ("use after %<%s%> of %qE",
				 funcname, ev.m_expr);
  }

  /* Implementation of pending_diagnostic::supercedes_p for
     use_after_free.

     We want use-after-free to supercede use-of-unitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the use-after-free.
     (this is because we fully purge information about freed
     buffers when we free them to avoid state explosions, so
     that if they are accessed after the free, it looks like
     they are uninitialized).  */

  bool supercedes_p (const pending_diagnostic &other) const final override
  {
    if (other.use_of_uninit_p ())
      return true;

    return false;
  }

private:
  diagnostic_event_id_t m_free_event;	 /* Where the deallocation occurred.  */
  const deallocator *m_deallocator;	 /* Never NULL (asserted in ctor).  */
};
1347
/* Concrete subclass for describing a leak of an allocated buffer.
   M_ARG may be NULL_TREE when no expression is available for the
   leaked value; the messages then fall back to "<unknown>".  */

class malloc_leak : public malloc_diagnostic
{
public:
  malloc_leak (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "malloc_leak"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_malloc_leak;
  }

  bool emit (rich_location *rich_loc) final override
  {
    /* "CWE-401: Missing Release of Memory after Effective Lifetime".  */
    diagnostic_metadata m;
    m.add_cwe (401);
    if (m_arg)
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "leak of %qE", m_arg);
    else
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "leak of %qs", "<unknown>");
  }

  /* Label the allocation event (either a transition to an unchecked
     state, or directly from the start state to a nonnull state) and
     remember its event id for describe_final_event.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state)
	|| (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
      {
	m_alloc_event = change.m_event_id;
	return label_text::borrow ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (ev.m_expr)
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qE leaks here; was allocated at %@",
				     ev.m_expr, &m_alloc_event);
	else
	  return ev.formatted_print ("%qE leaks here", ev.m_expr);
      }
    else
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qs leaks here; was allocated at %@",
				     "<unknown>", &m_alloc_event);
	else
	  return ev.formatted_print ("%qs leaks here", "<unknown>");
      }
  }

private:
  diagnostic_event_id_t m_alloc_event;	 /* Where the allocation occurred.  */
};
1409
/* Concrete subclass for describing a call to a deallocator (named by
   M_FUNCNAME) on a pointer that does not point to heap memory.
   M_FREED_REG, if non-NULL, is the region that was wrongly passed.  */

class free_of_non_heap : public malloc_diagnostic
{
public:
  free_of_non_heap (const malloc_state_machine &sm, tree arg,
		    const region *freed_reg,
		    const char *funcname)
  : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
  {
  }

  const char *get_kind () const final override { return "free_of_non_heap"; }

  bool subclass_equal_p (const pending_diagnostic &base_other) const
    final override
  {
    const free_of_non_heap &other = (const free_of_non_heap &)base_other;
    return (same_tree_p (m_arg, other.m_arg)
	    && m_freed_reg == other.m_freed_reg);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_free_of_non_heap;
  }

  /* Word the warning by where the region lives: "on the stack" when
     the memory space is known to be the stack, otherwise the generic
     "not on the heap".  */
  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap.  */
    switch (get_memory_space ())
      {
      default:
      case MEMSPACE_HEAP:
	gcc_unreachable ();
      case MEMSPACE_UNKNOWN:
      case MEMSPACE_CODE:
      case MEMSPACE_GLOBALS:
      case MEMSPACE_READONLY_DATA:
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "%<%s%> of %qE which points to memory"
			     " not on the heap",
			     m_funcname, m_arg);
	break;
      case MEMSPACE_STACK:
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "%<%s%> of %qE which points to memory"
			     " on the stack",
			     m_funcname, m_arg);
	break;
      }
  }

  label_text describe_state_change (const evdesc::state_change &)
    final override
  {
    return label_text::borrow ("pointer is from here");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("call to %qs here", m_funcname);
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_freed_reg)
      interest->add_region_creation (m_freed_reg);
  }

private:
  /* Return the memory space of the wrongly-freed region, or
     MEMSPACE_UNKNOWN if no region was recorded.  */
  enum memory_space get_memory_space () const
  {
    if (m_freed_reg)
      return m_freed_reg->get_memory_space ();
    else
      return MEMSPACE_UNKNOWN;
  }

  const region *m_freed_reg;	/* Possibly NULL.  */
  const char *m_funcname;	/* Name of the deallocator called.  */
};
1492
/* Concrete pending_diagnostic subclass for -Wanalyzer-deref-before-check:
   a pointer was dereferenced and only afterwards compared against NULL.
   Several heuristics in emit () reject likely-false-positive cases.  */

class deref_before_check : public malloc_diagnostic
{
public:
  deref_before_check (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg),
    m_deref_enode (NULL),
    m_deref_expr (NULL),
    m_check_enode (NULL)
  {
    gcc_assert (arg);
  }

  const char *get_kind () const final override { return "deref_before_check"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_deref_before_check;
  }

  bool emit (rich_location *rich_loc) final override
  {
    /* Don't emit the warning if we can't show where the deref
       and the check occur.  */
    if (!m_deref_enode)
      return false;
    if (!m_check_enode)
      return false;
    /* Only emit the warning for intraprocedural cases.  */
    const program_point &deref_point = m_deref_enode->get_point ();
    const program_point &check_point = m_check_enode->get_point ();

    if (!program_point::effectively_intraprocedural_p (deref_point,
						       check_point))
      return false;

    /* Reject the warning if the check occurs within a macro definition.
       This avoids false positives for such code as:

	#define throw_error \
	   do {             \
	     if (p)         \
	       cleanup (p); \
	     return;        \
	   } while (0)

	if (p->idx >= n)
	  throw_error ();

       where the usage of "throw_error" implicitly adds a check
       on 'p'.

       We do warn when the check is in a macro expansion if we can get
       at the location of the condition and it isn't part of the
       definition, so that we warn for checks such as:
	   if (words[0][0] == '@')
	     return;
	   g_assert(words[0] != NULL); <--- here
       Unfortunately we don't have locations for individual gimple
       arguments, so in:
	   g_assert (ptr);
       we merely have a gimple_cond
	   if (p_2(D) == 0B)
       with no way of getting at the location of the condition separately
       from that of the gimple_cond (where the "if" is within the macro
       definition).  We reject the warning for such cases.

       We do warn when the *deref* occurs in a macro, since this can be
       a source of real bugs; see e.g. PR 77425.  */
    location_t check_loc = m_check_enode->get_point ().get_location ();
    if (linemap_location_from_macro_definition_p (line_table, check_loc))
      return false;

    /* Reject if m_deref_expr is sufficiently different from m_arg
       for cases where the dereference is spelled differently from
       the check, which is probably two different ways to get the
       same svalue, and thus not worth reporting.  */
    if (!m_deref_expr)
      return false;
    if (!sufficiently_similar_p (m_deref_expr, m_arg))
      return false;

    /* Reject the warning if the deref's BB doesn't dominate that
       of the check, so that we don't warn e.g. for shared cleanup
       code that checks a pointer for NULL, when that code is sometimes
       used before a deref and sometimes after.
       Using the dominance code requires setting cfun.  */
    auto_cfun sentinel (m_deref_enode->get_function ());
    calculate_dominance_info (CDI_DOMINATORS);
    if (!dominated_by_p (CDI_DOMINATORS,
			 m_check_enode->get_supernode ()->m_bb,
			 m_deref_enode->get_supernode ()->m_bb))
      return false;

    return warning_at (rich_loc, get_controlling_option (),
		       "check of %qE for NULL after already"
		       " dereferencing it",
		       m_arg);
  }

  /* Record the event/enode/expr of the initial dereference (the
     transition from the start state to "assumed-non-null").  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& assumed_non_null_p (change.m_new_state))
      {
	m_first_deref_event = change.m_event_id;
	m_deref_enode = change.m_event.get_exploded_node ();
	m_deref_expr = change.m_expr;
	return change.formatted_print ("pointer %qE is dereferenced here",
				       m_arg);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  /* Record the enode of the check itself (used by emit above).  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    m_check_enode = ev.m_event.get_exploded_node ();
    if (m_first_deref_event.known_p ())
      return ev.formatted_print ("pointer %qE is checked for NULL here but"
				 " it was already dereferenced at %@",
				 m_arg, &m_first_deref_event);
    else
      return ev.formatted_print ("pointer %qE is checked for NULL here but"
				 " it was already dereferenced",
				 m_arg);
  }

private:
  /* Return true iff EXPR_A and EXPR_B pretty-print to identical text,
     used as a proxy for "the deref and the check refer to the same
     spelled expression".  */
  static bool sufficiently_similar_p (tree expr_a, tree expr_b)
  {
    pretty_printer *pp_a = global_dc->printer->clone ();
    pretty_printer *pp_b = global_dc->printer->clone ();
    pp_printf (pp_a, "%qE", expr_a);
    pp_printf (pp_b, "%qE", expr_b);
    bool result = (strcmp (pp_formatted_text (pp_a), pp_formatted_text (pp_b))
		   == 0);
    delete pp_a;
    delete pp_b;
    return result;
  }

  diagnostic_event_id_t m_first_deref_event;
  const exploded_node *m_deref_enode;	/* Where the deref happened.  */
  tree m_deref_expr;			/* The expr as spelled at the deref.  */
  const exploded_node *m_check_enode;	/* Where the NULL check happened.  */
};
1641
1642 /* struct allocation_state : public state_machine::state. */
1643
1644 /* Implementation of state_machine::state::dump_to_pp vfunc
1645 for allocation_state: append the API that this allocation is
1646 associated with. */
1647
1648 void
1649 allocation_state::dump_to_pp (pretty_printer *pp) const
1650 {
1651 state_machine::state::dump_to_pp (pp);
1652 if (m_deallocators)
1653 {
1654 pp_string (pp, " (");
1655 m_deallocators->dump_to_pp (pp);
1656 pp_character (pp, ')');
1657 }
1658 }
1659
/* Given a allocation_state for a deallocator_set, get the "nonnull" state
   for the corresponding allocator(s).  Requires m_deallocators to be
   non-NULL (asserted).  */

const allocation_state *
allocation_state::get_nonnull () const
{
  gcc_assert (m_deallocators);
  return as_a_allocation_state (m_deallocators->m_nonnull);
}
1669
/* struct assumed_non_null_state : public allocation_state.  */

/* Implementation of state_machine::state::dump_to_pp vfunc for
   assumed_non_null_state: also print the frame in which the
   assumption was made.  */

void
assumed_non_null_state::dump_to_pp (pretty_printer *pp) const
{
  allocation_state::dump_to_pp (pp);
  pp_string (pp, " (in ");
  m_frame->dump_to_pp (pp, true);
  pp_character (pp, ')');
}
1680
/* malloc_state_machine's ctor.  Sets up the built-in deallocator
   members and the states that have no associated deallocator_set.  */

malloc_state_machine::malloc_state_machine (logger *logger)
: state_machine ("malloc", logger),
  m_free (this, "free", WORDING_FREED),
  m_scalar_delete (this, "delete", WORDING_DELETED),
  m_vector_delete (this, "delete[]", WORDING_DELETED),
  m_realloc (this, "realloc", WORDING_REALLOCATED)
{
  gcc_assert (m_start->get_id () == 0);
  /* These states carry no deallocator/deallocator_set.  */
  m_null = add_state ("null", RS_FREED, NULL, NULL);
  m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
  m_stop = add_state ("stop", RS_STOP, NULL, NULL);
}
1695
/* malloc_state_machine's dtor: delete the dynamically-allocated
   custom deallocator sets and custom deallocators that this state
   machine owns (pushed onto m_dynamic_sets/m_dynamic_deallocators).  */

malloc_state_machine::~malloc_state_machine ()
{
  unsigned i;
  custom_deallocator_set *set;
  FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
    delete set;
  custom_deallocator *d;
  FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
    delete d;
}
1706
/* Add a state named NAME with resource_state RS and the given
   deallocator information, wrapping it in an allocation_state instance
   registered with the base class.  */

state_machine::state_t
malloc_state_machine::add_state (const char *name, enum resource_state rs,
				 const deallocator_set *deallocators,
				 const deallocator *deallocator)
{
  return add_custom_state (new allocation_state (name, alloc_state_id (),
						 rs, deallocators,
						 deallocator));
}
1716
1717 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1718 return a custom_deallocator_set for them, consolidating them
1719 to ensure uniqueness of the sets.
1720
1721 Return NULL if it has no such attributes. */
1722
1723 const custom_deallocator_set *
1724 malloc_state_machine::
1725 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1726 {
1727 /* Early rejection of decls without attributes. */
1728 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1729 if (!attrs)
1730 return NULL;
1731
1732 /* Otherwise, call maybe_create_custom_deallocator_set,
1733 memoizing the result. */
1734 if (custom_deallocator_set **slot
1735 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1736 return *slot;
1737 custom_deallocator_set *set
1738 = maybe_create_custom_deallocator_set (allocator_fndecl);
1739 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1740 return set;
1741 }
1742
/* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
   look for any "__attribute__((malloc(FOO)))" and return a
   custom_deallocator_set for them, consolidating them
   to ensure uniqueness of the sets.

   Return NULL if it has no such attributes.

   Subroutine of get_or_create_custom_deallocator_set which
   memoizes the result.  */

custom_deallocator_set *
malloc_state_machine::
maybe_create_custom_deallocator_set (tree allocator_fndecl)
{
  tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
  gcc_assert (attrs);

  /* Look for instances of __attribute__((malloc(FOO))).
     A "malloc" attribute without arguments is skipped (no
     TREE_VALUE (args)); only ones naming a deallocator count.  */
  auto_vec<const deallocator *> deallocator_vec;
  for (tree allocs = attrs;
       (allocs = lookup_attribute ("malloc", allocs));
       allocs = TREE_CHAIN (allocs))
    {
      tree args = TREE_VALUE (allocs);
      if (!args)
	continue;
      if (TREE_VALUE (args))
	{
	  const deallocator *d
	    = get_or_create_deallocator (TREE_VALUE (args));
	  deallocator_vec.safe_push (d);
	}
    }

  /* If there weren't any deallocators, bail.  */
  if (deallocator_vec.length () == 0)
    return NULL;

  /* Consolidate, so that we reuse existing deallocator_set
     instances.  Sorting makes the vector a canonical key for
     m_custom_deallocator_set_map.  */
  deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
  custom_deallocator_set **slot
    = m_custom_deallocator_set_map.get (&deallocator_vec);
  if (slot)
    return *slot;
  custom_deallocator_set *set
    = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
  m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
  m_dynamic_sets.safe_push (set);
  return set;
}
1794
1795 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1796
1797 const deallocator *
1798 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1799 {
1800 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1801 if (slot)
1802 return *slot;
1803
1804 /* Reuse "free". */
1805 deallocator *d;
1806 if (is_named_call_p (deallocator_fndecl, "free")
1807 || is_std_named_call_p (deallocator_fndecl, "free")
1808 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1809 d = &m_free.m_deallocator;
1810 else
1811 {
1812 custom_deallocator *cd
1813 = new custom_deallocator (this, deallocator_fndecl,
1814 WORDING_DEALLOCATED);
1815 m_dynamic_deallocators.safe_push (cd);
1816 d = cd;
1817 }
1818 m_deallocator_map.put (deallocator_fndecl, d);
1819 return d;
1820 }
1821
1822 /* Get the "assumed-non-null" state for assumptions made within FRAME,
1823 creating it if necessary. */
1824
1825 state_machine::state_t
1826 malloc_state_machine::
1827 get_or_create_assumed_non_null_state_for_frame (const frame_region *frame)
1828 {
1829 if (state_t *slot = m_assumed_non_null.get (frame))
1830 return *slot;
1831 state_machine::state *new_state
1832 = new assumed_non_null_state ("assumed-non-null", alloc_state_id (), frame);
1833 add_custom_state (new_state);
1834 m_assumed_non_null.put (frame, new_state);
1835 return new_state;
1836 }
1837
/* Try to identify the function declaration either by name or as a known malloc
   builtin.  Return true if FNDECL is an allocator whose result is freed
   with "free".  */

static bool
known_allocator_p (const_tree fndecl, const gcall *call)
{
  /* Either it is a function we know by name and number of arguments... */
  if (is_named_call_p (fndecl, "malloc", call, 1)
      || is_named_call_p (fndecl, "calloc", call, 2)
      || is_std_named_call_p (fndecl, "malloc", call, 1)
      || is_std_named_call_p (fndecl, "calloc", call, 2)
      || is_named_call_p (fndecl, "strdup", call, 1)
      || is_named_call_p (fndecl, "strndup", call, 2))
    return true;

  /* ... or it is a builtin allocator that allocates objects freed with
     __builtin_free.  */
  if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_MALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
	return true;
      default:
	break;
      }

  return false;
}
1869
/* If PTR's nullness is not known, transition it to the "assumed-non-null"
   state for the current frame.  Does nothing if the old region model is
   unavailable or PTR is already known to be (non-)NULL.  */

void
malloc_state_machine::maybe_assume_non_null (sm_context *sm_ctxt,
					     tree ptr,
					     const gimple *stmt) const
{
  const region_model *old_model = sm_ctxt->get_old_region_model ();
  if (!old_model)
    return;

  /* Only assume non-nullness when PTR's comparison against NULL is
     genuinely unknown (neither provably true nor false).  */
  tree null_ptr_cst = build_int_cst (TREE_TYPE (ptr), 0);
  tristate known_non_null
    = old_model->eval_condition (ptr, NE_EXPR, null_ptr_cst, NULL);
  if (known_non_null.is_unknown ())
    {
      /* Cast away const-ness for cache-like operations.  */
      malloc_state_machine *mut_this
	= const_cast <malloc_state_machine *> (this);
      state_t next_state
	= mut_this->get_or_create_assumed_non_null_state_for_frame
	    (old_model->get_current_frame ());
      sm_ctxt->set_next_state (stmt, ptr, next_state);
    }
}
1896
/* Implementation of state_machine::on_stmt vfunc for malloc_state_machine.
   Dispatches on the kind of statement: calls to known allocators,
   deallocators and attribute-annotated functions; null-comparisons of
   already-dereferenced pointers; zero-assignments; and dereferences.
   Returns true if the stmt was fully handled here.  */

bool
malloc_state_machine::on_stmt (sm_context *sm_ctxt,
			       const supernode *node,
			       const gimple *stmt) const
{
  if (const gcall *call = dyn_cast <const gcall *> (stmt))
    if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
      {
	if (known_allocator_p (callee_fndecl, call))
	  {
	    on_allocator_call (sm_ctxt, call, &m_free);
	    return true;
	  }

	/* C++ operator new/delete, both scalar and array forms.  */
	if (is_named_call_p (callee_fndecl, "operator new", call, 1))
	  on_allocator_call (sm_ctxt, call, &m_scalar_delete);
	else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
	  on_allocator_call (sm_ctxt, call, &m_vector_delete);
	else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
		 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
	  {
	    on_deallocator_call (sm_ctxt, node, call,
				 &m_scalar_delete.m_deallocator, 0);
	    return true;
	  }
	else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
	  {
	    on_deallocator_call (sm_ctxt, node, call,
				 &m_vector_delete.m_deallocator, 0);
	    return true;
	  }

	/* alloca results are stack memory: transition to "non-heap" so
	   that freeing them is flagged.  */
	if (is_named_call_p (callee_fndecl, "alloca", call, 1)
	    || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
	  {
	    tree lhs = gimple_call_lhs (call);
	    if (lhs)
	      sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
	    return true;
	  }

	if (is_named_call_p (callee_fndecl, "free", call, 1)
	    || is_std_named_call_p (callee_fndecl, "free", call, 1)
	    || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
	  {
	    on_deallocator_call (sm_ctxt, node, call,
				 &m_free.m_deallocator, 0);
	    return true;
	  }

	if (is_named_call_p (callee_fndecl, "realloc", call, 2)
	    || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
	  {
	    on_realloc_call (sm_ctxt, node, call);
	    return true;
	  }

	if (unaffected_by_call_p (callee_fndecl))
	  return true;

	/* Cast away const-ness for cache-like operations.  */
	malloc_state_machine *mutable_this
	  = const_cast <malloc_state_machine *> (this);

	/* Handle "__attribute__((malloc(FOO)))".  */
	if (const deallocator_set *deallocators
	      = mutable_this->get_or_create_custom_deallocator_set
		  (callee_fndecl))
	  {
	    tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
	    bool returns_nonnull
	      = lookup_attribute ("returns_nonnull", attrs);
	    on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
	  }

	/* Handle "__attribute__((nonnull))".  */
	{
	  tree fntype = TREE_TYPE (callee_fndecl);
	  bitmap nonnull_args = get_nonnull_args (fntype);
	  if (nonnull_args)
	    {
	      for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
		    continue;
		  /* If we have a nonnull-args, and either all pointers, or just
		     the specified pointers.  */
		  if (bitmap_empty_p (nonnull_args)
		      || bitmap_bit_p (nonnull_args, i))
		    {
		      state_t state = sm_ctxt->get_state (stmt, arg);
		      /* Can't use a switch as the states are non-const.  */
		      if (unchecked_p (state))
			{
			  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
			  sm_ctxt->warn (node, stmt, arg,
					 make_unique<possible_null_arg>
					   (*this, diag_arg, callee_fndecl, i));
			  const allocation_state *astate
			    = as_a_allocation_state (state);
			  sm_ctxt->set_next_state (stmt, arg,
						   astate->get_nonnull ());
			}
		      else if (state == m_null)
			{
			  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
			  sm_ctxt->warn (node, stmt, arg,
					 make_unique<null_arg>
					   (*this, diag_arg, callee_fndecl, i));
			  sm_ctxt->set_next_state (stmt, arg, m_stop);
			}
		      else if (state == m_start)
			maybe_assume_non_null (sm_ctxt, arg, stmt);
		    }
		}
	      BITMAP_FREE (nonnull_args);
	    }
	}

	/* Check for this after nonnull, so that if we have both
	   then we transition to "freed", rather than "checked".  */
	unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
	if (dealloc_argno != UINT_MAX)
	  {
	    const deallocator *d
	      = mutable_this->get_or_create_deallocator (callee_fndecl);
	    on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
	  }
      }

  /* Look for pointers explicitly being compared against zero
     that are in state assumed_non_null i.e. we already dereferenced
     them.
     We have to do this check here, rather than in on_condition
     because we add a constraint that the pointer is non-null when
     dereferencing it, and this makes the apply_constraints_for_gcond
     find known-true and known-false conditions; on_condition is only
     called when adding new constraints.  */
  if (const gcond *cond_stmt = dyn_cast <const gcond *> (stmt))
    {
      enum tree_code op = gimple_cond_code (cond_stmt);
      if (op == EQ_EXPR || op == NE_EXPR)
	{
	  tree lhs = gimple_cond_lhs (cond_stmt);
	  tree rhs = gimple_cond_rhs (cond_stmt);
	  if (any_pointer_p (lhs)
	      && any_pointer_p (rhs)
	      && zerop (rhs))
	    {
	      state_t state = sm_ctxt->get_state (stmt, lhs);
	      if (assumed_non_null_p (state))
		maybe_complain_about_deref_before_check
		  (sm_ctxt, node,
		   stmt,
		   (const assumed_non_null_state *)state,
		   lhs);
	    }
	}
    }

  if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
    if (any_pointer_p (lhs))
      on_zero_assignment (sm_ctxt, stmt,lhs);

  /* Handle dereferences.  */
  for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
    {
      tree op = gimple_op (stmt, i);
      if (!op)
	continue;
      if (TREE_CODE (op) == COMPONENT_REF)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) == MEM_REF)
	{
	  tree arg = TREE_OPERAND (op, 0);

	  state_t state = sm_ctxt->get_state (stmt, arg);
	  if (state == m_start)
	    maybe_assume_non_null (sm_ctxt, arg, stmt);
	  else if (unchecked_p (state))
	    {
	      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	      sm_ctxt->warn (node, stmt, arg,
			     make_unique<possible_null_deref> (*this,
							       diag_arg));
	      const allocation_state *astate = as_a_allocation_state (state);
	      sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
	    }
	  else if (state == m_null)
	    {
	      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	      sm_ctxt->warn (node, stmt, arg,
			     make_unique<null_deref> (*this, diag_arg));
	      sm_ctxt->set_next_state (stmt, arg, m_stop);
	    }
	  else if (freed_p (state))
	    {
	      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	      const allocation_state *astate = as_a_allocation_state (state);
	      sm_ctxt->warn (node, stmt, arg,
			     make_unique<use_after_free>
			       (*this, diag_arg, astate->m_deallocator));
	      sm_ctxt->set_next_state (stmt, arg, m_stop);
	    }
	}
    }
  return false;
}
2109
2110 /* Given a check against null of PTR in assumed-non-null state STATE,
2111 potentially add a deref_before_check warning to SM_CTXT. */
2112
2113 void
2114 malloc_state_machine::
2115 maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
2116 const supernode *node,
2117 const gimple *stmt,
2118 const assumed_non_null_state *state,
2119 tree ptr) const
2120 {
2121 const region_model *model = sm_ctxt->get_old_region_model ();
2122 if (!model)
2123 return;
2124
2125 /* Don't complain if the current frame (where the check is occurring) is
2126 deeper than the frame in which the "not null" assumption was made.
2127 This suppress false positives for cases like:
2128
2129 void foo (struct s *p)
2130 {
2131 int val = s->some_field; // deref here
2132 shared_helper (p);
2133 }
2134
2135 where "shared_helper" has:
2136
2137 void shared_helper (struct s *p)
2138 {
2139 if (!p) // check here
2140 return;
2141 // etc
2142 }
2143
2144 since the check in "shared_helper" is OK. */
2145 const frame_region *checked_in_frame = model->get_current_frame ();
2146 const frame_region *assumed_nonnull_in_frame = state->m_frame;
2147 if (checked_in_frame->get_index () > assumed_nonnull_in_frame->get_index ())
2148 return;
2149
2150 tree diag_ptr = sm_ctxt->get_diagnostic_tree (ptr);
2151 if (diag_ptr)
2152 sm_ctxt->warn
2153 (node, stmt, ptr,
2154 make_unique<deref_before_check> (*this, diag_ptr));
2155 sm_ctxt->set_next_state (stmt, ptr, m_stop);
2156 }
2157
2158 /* Handle a call to an allocator.
2159 RETURNS_NONNULL is true if CALL is to a fndecl known to have
2160 __attribute__((returns_nonnull)). */
2161
2162 void
2163 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
2164 const gcall *call,
2165 const deallocator_set *deallocators,
2166 bool returns_nonnull) const
2167 {
2168 tree lhs = gimple_call_lhs (call);
2169 if (lhs)
2170 {
2171 if (sm_ctxt->get_state (call, lhs) == m_start)
2172 sm_ctxt->set_next_state (call, lhs,
2173 (returns_nonnull
2174 ? deallocators->m_nonnull
2175 : deallocators->m_unchecked));
2176 }
2177 else
2178 {
2179 /* TODO: report leak. */
2180 }
2181 }
2182
2183 /* Handle deallocations of non-heap pointers.
2184 non-heap -> stop, with warning. */
2185
2186 void
2187 malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
2188 const supernode *node,
2189 const gcall *call,
2190 tree arg,
2191 const deallocator *d) const
2192 {
2193 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2194 const region *freed_reg = NULL;
2195 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
2196 {
2197 const region_model *old_model = old_state->m_region_model;
2198 const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
2199 freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
2200 }
2201 sm_ctxt->warn (node, call, arg,
2202 make_unique<free_of_non_heap>
2203 (*this, diag_arg, freed_reg, d->m_name));
2204 sm_ctxt->set_next_state (call, arg, m_stop);
2205 }
2206
/* Handle a call to deallocator D, deallocating argument ARGNO of CALL.
   Transition the sm-state of that argument, warning as appropriate:
   wrong allocator/deallocator pairing, double-free, or deallocation
   of a non-heap pointer.  */

void
malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
					   const supernode *node,
					   const gcall *call,
					   const deallocator *d,
					   unsigned argno) const
{
  /* Bail out on a malformed call with too few arguments.  */
  if (argno >= gimple_call_num_args (call))
    return;
  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt->get_state (call, arg);

  /* start/assumed_non_null/unchecked/nonnull -> freed.  */
  if (state == m_start || assumed_non_null_p (state))
    sm_ctxt->set_next_state (call, arg, d->m_freed);
  else if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      if (!astate->m_deallocators->contains_p (d))
	{
	  /* Wrong allocator.  */
	  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	  sm_ctxt->warn (node, call, arg,
			 make_unique<mismatching_deallocation>
			   (*this, diag_arg,
			    astate->m_deallocators,
			    d));
	}
      /* Transition to "freed" even after the mismatch warning, so that
	 a subsequent deallocation is reported as a double-free.  */
      sm_ctxt->set_next_state (call, arg, d->m_freed);
    }

  /* Keep state "null" as-is, rather than transitioning to "freed";
     we don't want to complain about double-free of NULL.  */
  else if (state == d->m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
      sm_ctxt->warn (node, call, arg,
		     make_unique<double_free> (*this, diag_arg, d->m_name));
      sm_ctxt->set_next_state (call, arg, m_stop);
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
    }
}
2256
2257 /* Handle a call to "realloc".
2258 Check for free of non-heap or mismatching allocators,
2259 transitioning to the "stop" state for such cases.
2260
2261 Otherwise, kf_realloc::impl_call_post will later
2262 get called (which will handle other sm-state transitions
2263 when the state is bifurcated). */
2264
2265 void
2266 malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
2267 const supernode *node,
2268 const gcall *call) const
2269 {
2270 const unsigned argno = 0;
2271 const deallocator *d = &m_realloc;
2272
2273 tree arg = gimple_call_arg (call, argno);
2274
2275 state_t state = sm_ctxt->get_state (call, arg);
2276
2277 if (unchecked_p (state) || nonnull_p (state))
2278 {
2279 const allocation_state *astate = as_a_allocation_state (state);
2280 gcc_assert (astate->m_deallocators);
2281 if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
2282 {
2283 /* Wrong allocator. */
2284 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2285 sm_ctxt->warn (node, call, arg,
2286 make_unique<mismatching_deallocation>
2287 (*this, diag_arg,
2288 astate->m_deallocators, d));
2289 sm_ctxt->set_next_state (call, arg, m_stop);
2290 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2291 path_ctxt->terminate_path ();
2292 }
2293 }
2294 else if (state == m_free.m_deallocator.m_freed)
2295 {
2296 /* freed -> stop, with warning. */
2297 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2298 sm_ctxt->warn (node, call, arg,
2299 make_unique<double_free> (*this, diag_arg, "free"));
2300 sm_ctxt->set_next_state (call, arg, m_stop);
2301 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2302 path_ctxt->terminate_path ();
2303 }
2304 else if (state == m_non_heap)
2305 {
2306 /* non-heap -> stop, with warning. */
2307 handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
2308 if (path_context *path_ctxt = sm_ctxt->get_path_context ())
2309 path_ctxt->terminate_path ();
2310 }
2311 }
2312
2313 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
2314
2315 void
2316 malloc_state_machine::on_phi (sm_context *sm_ctxt,
2317 const supernode *node ATTRIBUTE_UNUSED,
2318 const gphi *phi,
2319 tree rhs) const
2320 {
2321 if (zerop (rhs))
2322 {
2323 tree lhs = gimple_phi_result (phi);
2324 on_zero_assignment (sm_ctxt, phi, lhs);
2325 }
2326 }
2327
2328 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
2329 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
2330
2331 void
2332 malloc_state_machine::on_condition (sm_context *sm_ctxt,
2333 const supernode *node ATTRIBUTE_UNUSED,
2334 const gimple *stmt,
2335 const svalue *lhs,
2336 enum tree_code op,
2337 const svalue *rhs) const
2338 {
2339 if (!rhs->all_zeroes_p ())
2340 return;
2341
2342 if (!any_pointer_p (lhs))
2343 return;
2344 if (!any_pointer_p (rhs))
2345 return;
2346
2347 if (op == NE_EXPR)
2348 {
2349 log ("got 'ARG != 0' match");
2350 state_t s = sm_ctxt->get_state (stmt, lhs);
2351 if (unchecked_p (s))
2352 {
2353 const allocation_state *astate = as_a_allocation_state (s);
2354 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
2355 }
2356 }
2357 else if (op == EQ_EXPR)
2358 {
2359 log ("got 'ARG == 0' match");
2360 state_t s = sm_ctxt->get_state (stmt, lhs);
2361 if (unchecked_p (s))
2362 sm_ctxt->set_next_state (stmt, lhs, m_null);
2363 }
2364 }
2365
2366 /* Implementation of state_machine::on_pop_frame vfunc for malloc_state_machine.
2367 Clear any "assumed-non-null" state where the assumption happened in
2368 FRAME_REG. */
2369
2370 void
2371 malloc_state_machine::on_pop_frame (sm_state_map *smap,
2372 const frame_region *frame_reg) const
2373 {
2374 hash_set<const svalue *> svals_to_clear;
2375 for (auto kv : *smap)
2376 {
2377 const svalue *sval = kv.first;
2378 state_t state = kv.second.m_state;
2379 if (assumed_non_null_p (state))
2380 {
2381 const assumed_non_null_state *assumed_state
2382 = (const assumed_non_null_state *)state;
2383 if (frame_reg == assumed_state->m_frame)
2384 svals_to_clear.add (sval);
2385 }
2386 }
2387 for (auto sval : svals_to_clear)
2388 smap->clear_any_state (sval);
2389 }
2390
2391 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
2392 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
2393 (to avoid false leak reports). */
2394
2395 bool
2396 malloc_state_machine::can_purge_p (state_t s) const
2397 {
2398 enum resource_state rs = get_rs (s);
2399 return rs != RS_UNCHECKED && rs != RS_NONNULL;
2400 }
2401
/* Implementation of state_machine::on_leak vfunc for malloc_state_machine
   (for complaining about leaks of pointers in state 'unchecked' and
   'nonnull').
   VAR is the leaked pointer; wrap it in a malloc_leak pending
   diagnostic.  */

std::unique_ptr<pending_diagnostic>
malloc_state_machine::on_leak (tree var) const
{
  return make_unique<malloc_leak> (*this, var);
}
2411
2412 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
2413 for malloc_state_machine. */
2414
2415 bool
2416 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
2417 bool is_mutable) const
2418 {
2419 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
2420 unknown fn. */
2421 if (s == m_non_heap)
2422 return false;
2423
2424 /* Otherwise, pointers passed as non-const can be freed. */
2425 return is_mutable;
2426 }
2427
2428 /* Implementation of state_machine::maybe_get_merged_states_nonequal vfunc
2429 for malloc_state_machine.
2430
2431 Support discarding "assumed-non-null" states when merging with
2432 start state. */
2433
2434 state_machine::state_t
2435 malloc_state_machine::maybe_get_merged_states_nonequal (state_t state_a,
2436 state_t state_b) const
2437 {
2438 if (assumed_non_null_p (state_a) && state_b == m_start)
2439 return m_start;
2440 if (state_a == m_start && assumed_non_null_p (state_b))
2441 return m_start;
2442 return NULL;
2443 }
2444
2445 /* Return true if calls to FNDECL are known to not affect this sm-state. */
2446
2447 bool
2448 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2449 {
2450 /* A set of functions that are known to not affect allocation
2451 status, even if we haven't fully modelled the rest of their
2452 behavior yet. */
2453 static const char * const funcnames[] = {
2454 /* This array must be kept sorted. */
2455 "strsep",
2456 };
2457 const size_t count = ARRAY_SIZE (funcnames);
2458 function_set fs (funcnames, count);
2459
2460 if (fs.contains_decl_p (fndecl))
2461 return true;
2462
2463 return false;
2464 }
2465
2466 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2467 assign zero to LHS. */
2468
2469 void
2470 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2471 const gimple *stmt,
2472 tree lhs) const
2473 {
2474 state_t s = sm_ctxt->get_state (stmt, lhs);
2475 enum resource_state rs = get_rs (s);
2476 if (rs == RS_START
2477 || rs == RS_UNCHECKED
2478 || rs == RS_NONNULL
2479 || rs == RS_FREED)
2480 sm_ctxt->set_next_state (stmt, lhs, m_null);
2481 }
2482
2483 /* Special-case hook for handling realloc, for the "success with move to
2484 a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
2485 non-null.
2486
2487 This is similar to on_deallocator_call and on_allocator_call,
2488 but the checks happen in on_realloc_call, and by splitting the states. */
2489
2490 void
2491 malloc_state_machine::
2492 on_realloc_with_move (region_model *model,
2493 sm_state_map *smap,
2494 const svalue *old_ptr_sval,
2495 const svalue *new_ptr_sval,
2496 const extrinsic_state &ext_state) const
2497 {
2498 smap->set_state (model, old_ptr_sval,
2499 m_free.m_deallocator.m_freed,
2500 NULL, ext_state);
2501
2502 smap->set_state (model, new_ptr_sval,
2503 m_free.m_nonnull,
2504 NULL, ext_state);
2505 }
2506
2507 } // anonymous namespace
2508
/* Internal interface to this file.
   Create a malloc_state_machine, using LOGGER for logging.
   The result is heap-allocated; presumably the caller takes
   ownership (NOTE(review): confirm against the callers).  */

state_machine *
make_malloc_state_machine (logger *logger)
{
  return new malloc_state_machine (logger);
}
2516
2517 /* Specialcase hook for handling realloc, for use by
2518 kf_realloc::impl_call_post::success_with_move::update_model. */
2519
2520 void
2521 region_model::on_realloc_with_move (const call_details &cd,
2522 const svalue *old_ptr_sval,
2523 const svalue *new_ptr_sval)
2524 {
2525 region_model_context *ctxt = cd.get_ctxt ();
2526 if (!ctxt)
2527 return;
2528 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2529 if (!ext_state)
2530 return;
2531
2532 sm_state_map *smap;
2533 const state_machine *sm;
2534 unsigned sm_idx;
2535 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2536 return;
2537
2538 gcc_assert (smap);
2539 gcc_assert (sm);
2540
2541 const malloc_state_machine &malloc_sm
2542 = (const malloc_state_machine &)*sm;
2543
2544 malloc_sm.on_realloc_with_move (this,
2545 smap,
2546 old_ptr_sval,
2547 new_ptr_sval,
2548 *ext_state);
2549 }
2550
2551 } // namespace ana
2552
2553 #endif /* #if ENABLE_ANALYZER */