git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/analyzer/sm-malloc.cc
analyzer: fix deref-before-check false positives seen in haproxy [PR108475,PR109060]
[thirdparty/gcc.git] / gcc / analyzer / sm-malloc.cc
1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "make-unique.h"
26 #include "tree.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "options.h"
31 #include "bitmap.h"
32 #include "diagnostic-path.h"
33 #include "diagnostic-metadata.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "analyzer/call-string.h"
40 #include "analyzer/program-point.h"
41 #include "analyzer/store.h"
42 #include "analyzer/region-model.h"
43 #include "analyzer/call-details.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "analyzer/function-set.h"
47 #include "analyzer/program-state.h"
48 #include "analyzer/checker-event.h"
49 #include "analyzer/exploded-graph.h"
50
51 #if ENABLE_ANALYZER
52
53 namespace ana {
54
55 namespace {
56
57 /* This state machine and its various support classes track allocations
58 and deallocations.
59
60 It has a few standard allocation/deallocation pairs (e.g. new/delete),
61 and also supports user-defined ones via
62 __attribute__ ((malloc(DEALLOCATOR))).
63
64 There can be more than one valid deallocator for a given allocator,
65 for example:
66 __attribute__ ((malloc (fclose)))
67 __attribute__ ((malloc (freopen, 3)))
68 FILE* fopen (const char*, const char*);
69 A deallocator_set represents a particular set of valid deallocators.
70
71 We track the expected deallocator_set for a value, but not the allocation
72 function - there could be more than one allocator per deallocator_set.
73 For example, there could be dozens of allocators for "free" beyond just
74 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
75 of states by tracking individual allocators in the exploded graph;
76 we merely want to track "this value expects to have 'free' called on it".
77 Perhaps we can reconstruct which allocator was used later, when emitting
78 the path, if it's necessary for precision of wording of diagnostics. */
79
80 class deallocator;
81 class deallocator_set;
82 class malloc_state_machine;
83
84 /* An enum for discriminating between different kinds of allocation_state. */
85
enum resource_state
{
  /* States that are independent of allocator/deallocator.  */

  /* The start state.  */
  RS_START,

  /* State for a pointer that's been unconditionally dereferenced.  */
  RS_ASSUMED_NON_NULL,

  /* State for a pointer that's known to be NULL.  */
  RS_NULL,

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global), and hence must not be passed to a deallocator.  */
  RS_NON_HEAP,

  /* Stop state, for pointers we don't want to track any more.  */
  RS_STOP,

  /* States that relate to a specific deallocator_set.  */

  /* State for a pointer returned from an allocator that hasn't
     been checked for NULL.
     It could be a pointer to heap-allocated memory, or could be NULL.  */
  RS_UNCHECKED,

  /* State for a pointer returned from an allocator,
     known to be non-NULL.  */
  RS_NONNULL,

  /* State for a pointer passed to a deallocator.
     Deallocating again from here is diagnosed as a double-free.  */
  RS_FREED
};
120
121 /* Custom state subclass, which can optionally refer to an a
122 deallocator_set. */
123
struct allocation_state : public state_machine::state
{
  allocation_state (const char *name, unsigned id,
		    enum resource_state rs,
		    const deallocator_set *deallocators,
		    const deallocator *deallocator)
  : state (name, id), m_rs (rs),
    m_deallocators (deallocators),
    m_deallocator (deallocator)
  {}

  void dump_to_pp (pretty_printer *pp) const override;

  /* Get the corresponding "nonnull" state (implemented later in this
     file).  */
  const allocation_state *get_nonnull () const;

  /* Discriminator between the kinds of allocation_state.  */
  enum resource_state m_rs;

  /* The set of valid deallocators for a value in this state, or NULL
     for states that are independent of any allocator/deallocator
     (e.g. the "freed" state is created with a NULL set; see the
     deallocator ctor below).  */
  const deallocator_set *m_deallocators;

  /* The specific deallocator this state relates to (used for "freed"
     states), or NULL.  */
  const deallocator *m_deallocator;
};
143
144 /* Custom state subclass, for the "assumed-non-null" state
145 where the assumption happens in a particular frame. */
146
struct assumed_non_null_state : public allocation_state
{
  assumed_non_null_state (const char *name, unsigned id,
			  const frame_region *frame)
  : allocation_state (name, id, RS_ASSUMED_NON_NULL,
		      NULL, NULL),
    m_frame (frame)
  {
    /* An assumed-non-null state only makes sense relative to a frame.  */
    gcc_assert (m_frame);
  }

  void dump_to_pp (pretty_printer *pp) const final override;

  /* The stack frame in which the non-NULL assumption was made.  */
  const frame_region *m_frame;
};
162
163 /* An enum for choosing which wording to use in various diagnostics
164 when describing deallocations. */
165
/* Which verb family ("freed"/"deleted"/"deallocated"/"reallocated")
   to use when describing a deallocation in a diagnostic.  */

enum wording
{
  WORDING_FREED,
  WORDING_DELETED,
  WORDING_DEALLOCATED,
  WORDING_REALLOCATED
};
173
174 /* Base class representing a deallocation function,
175 either a built-in one we know about, or one exposed via
176 __attribute__((malloc(DEALLOCATOR))). */
177
struct deallocator
{
  hashval_t hash () const;
  void dump_to_pp (pretty_printer *pp) const;
  /* Three-way comparison, for sorting; see cmp_ptr_ptr for qsort use.  */
  static int cmp (const deallocator *a, const deallocator *b);
  static int cmp_ptr_ptr (const void *, const void *);

  /* Name to use in diagnostics.  */
  const char *m_name;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* State for a value passed to one of the deallocators.
     Registered with the state machine by the ctor.  */
  state_machine::state_t m_freed;

protected:
  /* Protected: instances are created only via the subclasses
     (standard_deallocator / custom_deallocator).  */
  deallocator (malloc_state_machine *sm,
	       const char *name,
	       enum wording wording);
};
199
200 /* Subclass representing a predefined deallocator.
201 e.g. "delete []", without needing a specific FUNCTION_DECL
202 ahead of time. */
203
struct standard_deallocator : public deallocator
{
  /* All state setup happens in the deallocator base-class ctor.  */
  standard_deallocator (malloc_state_machine *sm,
			const char *name,
			enum wording wording);
};
210
211 /* Subclass representing a user-defined deallocator
212 via __attribute__((malloc(DEALLOCATOR))) given
213 a specific FUNCTION_DECL. */
214
struct custom_deallocator : public deallocator
{
  /* Use the name of DEALLOCATOR_FNDECL as the diagnostic name.  */
  custom_deallocator (malloc_state_machine *sm,
		      tree deallocator_fndecl,
		      enum wording wording)
  : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
		 wording)
  {
  }
};
225
226 /* Base class representing a set of possible deallocators.
227 Often this will be just a single deallocator, but some
228 allocators have multiple valid deallocators (e.g. the result of
229 "fopen" can be closed by either "fclose" or "freopen"). */
230
struct deallocator_set
{
  deallocator_set (malloc_state_machine *sm,
		   enum wording wording);
  virtual ~deallocator_set () {}

  /* Return true if D is a valid deallocator for this set.  */
  virtual bool contains_p (const deallocator *d) const = 0;
  /* Return the sole deallocator if the set has exactly one,
     otherwise NULL.  */
  virtual const deallocator *maybe_get_single () const = 0;
  virtual void dump_to_pp (pretty_printer *pp) const = 0;
  void dump () const;

  /* Which wording to use in diagnostics.  */
  enum wording m_wording;

  /* Pointers to states.
     These states are owned by the state_machine base class.  */

  /* State for an unchecked result from an allocator using this set.  */
  state_machine::state_t m_unchecked;

  /* State for a known non-NULL result from such an allocator.  */
  state_machine::state_t m_nonnull;
};
254
255 /* Subclass of deallocator_set representing a set of deallocators
256 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
257
258 struct custom_deallocator_set : public deallocator_set
259 {
260 typedef const auto_vec <const deallocator *> *key_t;
261
262 custom_deallocator_set (malloc_state_machine *sm,
263 const auto_vec <const deallocator *> *vec,
264 //const char *name,
265 //const char *dealloc_funcname,
266 //unsigned arg_idx,
267 enum wording wording);
268
269 bool contains_p (const deallocator *d) const final override;
270 const deallocator *maybe_get_single () const final override;
271 void dump_to_pp (pretty_printer *pp) const final override;
272
273 auto_vec <const deallocator *> m_deallocator_vec;
274 };
275
276 /* Subclass of deallocator_set representing a set of deallocators
277 with a single standard_deallocator, e.g. "delete []". */
278
struct standard_deallocator_set : public deallocator_set
{
  standard_deallocator_set (malloc_state_machine *sm,
			    const char *name,
			    enum wording wording);

  bool contains_p (const deallocator *d) const final override;
  const deallocator *maybe_get_single () const final override;
  void dump_to_pp (pretty_printer *pp) const final override;

  /* The single deallocator in this set, stored by value.  */
  standard_deallocator m_deallocator;
};
291
292 /* Traits class for ensuring uniqueness of deallocator_sets within
293 malloc_state_machine. */
294
295 struct deallocator_set_map_traits
296 {
297 typedef custom_deallocator_set::key_t key_type;
298 typedef custom_deallocator_set *value_type;
299 typedef custom_deallocator_set *compare_type;
300
301 static inline hashval_t hash (const key_type &k)
302 {
303 gcc_assert (k != NULL);
304 gcc_assert (k != reinterpret_cast<key_type> (1));
305
306 hashval_t result = 0;
307 unsigned i;
308 const deallocator *d;
309 FOR_EACH_VEC_ELT (*k, i, d)
310 result ^= d->hash ();
311 return result;
312 }
313 static inline bool equal_keys (const key_type &k1, const key_type &k2)
314 {
315 if (k1->length () != k2->length ())
316 return false;
317
318 for (unsigned i = 0; i < k1->length (); i++)
319 if ((*k1)[i] != (*k2)[i])
320 return false;
321
322 return true;
323 }
324 template <typename T>
325 static inline void remove (T &)
326 {
327 /* empty; the nodes are handled elsewhere. */
328 }
329 template <typename T>
330 static inline void mark_deleted (T &entry)
331 {
332 entry.m_key = reinterpret_cast<key_type> (1);
333 }
334 template <typename T>
335 static inline void mark_empty (T &entry)
336 {
337 entry.m_key = NULL;
338 }
339 template <typename T>
340 static inline bool is_deleted (const T &entry)
341 {
342 return entry.m_key == reinterpret_cast<key_type> (1);
343 }
344 template <typename T>
345 static inline bool is_empty (const T &entry)
346 {
347 return entry.m_key == NULL;
348 }
349 static const bool empty_zero_p = false;
350 };
351
352 /* A state machine for detecting misuses of the malloc/free API.
353
354 See sm-malloc.dot for an overview (keep this in-sync with that file). */
355
class malloc_state_machine : public state_machine
{
public:
  typedef allocation_state custom_data_t;

  malloc_state_machine (logger *logger);
  ~malloc_state_machine ();

  /* Create and register an allocation_state with the given
     resource_state and (optional) deallocator information.  */
  state_t
  add_state (const char *name, enum resource_state rs,
	     const deallocator_set *deallocators,
	     const deallocator *deallocator);

  bool inherited_state_p () const final override { return false; }

  /* Determine the implicit state of SVAL before any transitions:
     - a constant zero is known-NULL;
     - a pointer into code, globals, the stack, or read-only data is
       known not to be heap-allocated;
     - anything else starts in the start state.  */
  state_machine::state_t
  get_default_state (const svalue *sval) const final override
  {
    if (tree cst = sval->maybe_get_constant ())
      {
	if (zerop (cst))
	  return m_null;
      }
    if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
      {
	const region *reg = ptr->get_pointee ();
	switch (reg->get_memory_space ())
	  {
	  default:
	    break;
	  case MEMSPACE_CODE:
	  case MEMSPACE_GLOBALS:
	  case MEMSPACE_STACK:
	  case MEMSPACE_READONLY_DATA:
	    return m_non_heap;
	  }
      }
    return m_start;
  }

  bool on_stmt (sm_context *sm_ctxt,
		const supernode *node,
		const gimple *stmt) const final override;

  void on_phi (sm_context *sm_ctxt,
	       const supernode *node,
	       const gphi *phi,
	       tree rhs) const final override;

  void on_condition (sm_context *sm_ctxt,
		     const supernode *node,
		     const gimple *stmt,
		     const svalue *lhs,
		     enum tree_code op,
		     const svalue *rhs) const final override;

  void on_pop_frame (sm_state_map *smap,
		     const frame_region *) const final override;

  bool can_purge_p (state_t s) const final override;
  std::unique_ptr<pending_diagnostic> on_leak (tree var) const final override;

  bool reset_when_passed_to_unknown_fn_p (state_t s,
					  bool is_mutable) const final override;

  state_t
  maybe_get_merged_states_nonequal (state_t state_a,
				    state_t state_b) const final override;

  static bool unaffected_by_call_p (tree fndecl);

  void maybe_assume_non_null (sm_context *sm_ctxt,
			      tree ptr,
			      const gimple *stmt) const;

  void on_realloc_with_move (region_model *model,
			     sm_state_map *smap,
			     const svalue *old_ptr_sval,
			     const svalue *new_ptr_sval,
			     const extrinsic_state &ext_state) const;

  /* Deallocator sets for the built-in deallocation APIs.  */
  standard_deallocator_set m_free;
  standard_deallocator_set m_scalar_delete;
  standard_deallocator_set m_vector_delete;

  standard_deallocator m_realloc;

  /* States that are independent of api.  */

  /* States for a pointer that's been unconditionally dereferenced
     in a particular stack frame.  */
  hash_map<const frame_region *, state_t> m_assumed_non_null;

  /* State for a pointer that's known to be NULL.  */
  state_t m_null;

  /* State for a pointer that's known to not be on the heap (e.g. to a local
     or global).  */
  state_t m_non_heap; // TODO: or should this be a different state machine?
  // or do we need child values etc?

  /* Stop state, for pointers we don't want to track any more.  */
  state_t m_stop;

private:
  const custom_deallocator_set *
  get_or_create_custom_deallocator_set (tree allocator_fndecl);
  custom_deallocator_set *
  maybe_create_custom_deallocator_set (tree allocator_fndecl);
  const deallocator *
  get_or_create_deallocator (tree deallocator_fndecl);

  state_t
  get_or_create_assumed_non_null_state_for_frame (const frame_region *frame);

  void
  maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
					   const supernode *node,
					   const gimple *stmt,
					   const assumed_non_null_state *,
					   tree ptr) const;

  void on_allocator_call (sm_context *sm_ctxt,
			  const gcall *call,
			  const deallocator_set *deallocators,
			  bool returns_nonnull = false) const;
  void handle_free_of_non_heap (sm_context *sm_ctxt,
				const supernode *node,
				const gcall *call,
				tree arg,
				const deallocator *d) const;
  void on_deallocator_call (sm_context *sm_ctxt,
			    const supernode *node,
			    const gcall *call,
			    const deallocator *d,
			    unsigned argno) const;
  void on_realloc_call (sm_context *sm_ctxt,
			const supernode *node,
			const gcall *call) const;
  void on_zero_assignment (sm_context *sm_ctxt,
			   const gimple *stmt,
			   tree lhs) const;

  /* A map for consolidating deallocators so that they are
     unique per deallocator FUNCTION_DECL.  */
  typedef hash_map<tree, deallocator *> deallocator_map_t;
  deallocator_map_t m_deallocator_map;

  /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *.  */
  typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
  deallocator_set_cache_t m_custom_deallocator_set_cache;

  /* A map for consolidating custom_deallocator_set instances.  */
  typedef hash_map<custom_deallocator_set::key_t,
		   custom_deallocator_set *,
		   deallocator_set_map_traits> custom_deallocator_set_map_t;
  custom_deallocator_set_map_t m_custom_deallocator_set_map;

  /* Record of dynamically-allocated objects, for cleanup.  */
  auto_vec <custom_deallocator_set *> m_dynamic_sets;
  auto_vec <custom_deallocator *> m_dynamic_deallocators;
};
518
519 /* struct deallocator. */
520
deallocator::deallocator (malloc_state_machine *sm,
			  const char *name,
			  enum wording wording)
: m_name (name),
  m_wording (wording),
  /* Register this deallocator's "freed" state with the state machine.  */
  m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
{
}
529
530 hashval_t
531 deallocator::hash () const
532 {
533 return (hashval_t)m_freed->get_id ();
534 }
535
void
deallocator::dump_to_pp (pretty_printer *pp) const
{
  /* Print the quoted diagnostic name, e.g. 'free'.  */
  pp_printf (pp, "%qs", m_name);
}
541
542 int
543 deallocator::cmp (const deallocator *a, const deallocator *b)
544 {
545 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
546 }
547
548 int
549 deallocator::cmp_ptr_ptr (const void *a, const void *b)
550 {
551 return cmp (*(const deallocator * const *)a,
552 *(const deallocator * const *)b);
553 }
554
555
556 /* struct standard_deallocator : public deallocator. */
557
standard_deallocator::standard_deallocator (malloc_state_machine *sm,
					    const char *name,
					    enum wording wording)
/* All the work (including registering the "freed" state) is done by
   the deallocator base class.  */
: deallocator (sm, name, wording)
{
}
564
565 /* struct deallocator_set. */
566
deallocator_set::deallocator_set (malloc_state_machine *sm,
				  enum wording wording)
: m_wording (wording),
  /* Register the per-set "unchecked" and "nonnull" states; they are
     owned by the state_machine base class.  */
  m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
  m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
{
}
574
575 /* Dump a description of this deallocator_set to stderr. */
576
DEBUG_FUNCTION void
deallocator_set::dump () const
{
  /* Build a pretty_printer wired to stderr, mirroring the global
     diagnostic context's color setting.  */
  pretty_printer pp;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp);
  pp_newline (&pp);
  pp_flush (&pp);
}
587
588 /* struct custom_deallocator_set : public deallocator_set. */
589
590 custom_deallocator_set::
591 custom_deallocator_set (malloc_state_machine *sm,
592 const auto_vec <const deallocator *> *vec,
593 enum wording wording)
594 : deallocator_set (sm, wording),
595 m_deallocator_vec (vec->length ())
596 {
597 unsigned i;
598 const deallocator *d;
599 FOR_EACH_VEC_ELT (*vec, i, d)
600 m_deallocator_vec.safe_push (d);
601 }
602
603 bool
604 custom_deallocator_set::contains_p (const deallocator *d) const
605 {
606 unsigned i;
607 const deallocator *cd;
608 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
609 if (cd == d)
610 return true;
611 return false;
612 }
613
614 const deallocator *
615 custom_deallocator_set::maybe_get_single () const
616 {
617 if (m_deallocator_vec.length () == 1)
618 return m_deallocator_vec[0];
619 return NULL;
620 }
621
622 void
623 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
624 {
625 pp_character (pp, '{');
626 unsigned i;
627 const deallocator *d;
628 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
629 {
630 if (i > 0)
631 pp_string (pp, ", ");
632 d->dump_to_pp (pp);
633 }
634 pp_character (pp, '}');
635 }
636
637 /* struct standard_deallocator_set : public deallocator_set. */
638
standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
						    const char *name,
						    enum wording wording)
: deallocator_set (sm, wording),
  /* The single by-value deallocator shares this set's name/wording.  */
  m_deallocator (sm, name, wording)
{
}
646
647 bool
648 standard_deallocator_set::contains_p (const deallocator *d) const
649 {
650 return d == &m_deallocator;
651 }
652
const deallocator *
standard_deallocator_set::maybe_get_single () const
{
  /* This set always contains exactly one deallocator.  */
  return &m_deallocator;
}
658
void
standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
{
  /* Print e.g. "{free}".  */
  pp_character (pp, '{');
  pp_string (pp, m_deallocator.m_name);
  pp_character (pp, '}');
}
666
667 /* Return STATE cast to the custom state subclass, or NULL for the start state.
668 Everything should be an allocation_state apart from the start state. */
669
670 static const allocation_state *
671 dyn_cast_allocation_state (state_machine::state_t state)
672 {
673 if (state->get_id () == 0)
674 return NULL;
675 return static_cast <const allocation_state *> (state);
676 }
677
678 /* Return STATE cast to the custom state subclass, for a state that is
679 already known to not be the start state . */
680
static const allocation_state *
as_a_allocation_state (state_machine::state_t state)
{
  /* The start state (id 0) is not an allocation_state; the caller must
     have already ruled it out.  */
  gcc_assert (state->get_id () != 0);
  return static_cast <const allocation_state *> (state);
}
687
688 /* Get the resource_state for STATE. */
689
690 static enum resource_state
691 get_rs (state_machine::state_t state)
692 {
693 if (const allocation_state *astate = dyn_cast_allocation_state (state))
694 return astate->m_rs;
695 else
696 return RS_START;
697 }
698
699 /* Return true if STATE is the start state. */
700
static bool
start_p (state_machine::state_t state)
{
  /* Only the start state maps to RS_START (see get_rs).  */
  return get_rs (state) == RS_START;
}
706
707 /* Return true if STATE is an unchecked result from an allocator. */
708
static bool
unchecked_p (state_machine::state_t state)
{
  /* i.e. an allocation result not yet compared against NULL.  */
  return get_rs (state) == RS_UNCHECKED;
}
714
715 /* Return true if STATE is a non-null result from an allocator. */
716
static bool
nonnull_p (state_machine::state_t state)
{
  /* i.e. an allocation result known to be non-NULL.  */
  return get_rs (state) == RS_NONNULL;
}
722
723 /* Return true if STATE is a value that has been passed to a deallocator. */
724
static bool
freed_p (state_machine::state_t state)
{
  /* i.e. the value has already been passed to a deallocator.  */
  return get_rs (state) == RS_FREED;
}
730
731 /* Return true if STATE is a value that has been assumed to be non-NULL. */
732
static bool
assumed_non_null_p (state_machine::state_t state)
{
  /* i.e. the value was unconditionally dereferenced earlier.  */
  return get_rs (state) == RS_ASSUMED_NON_NULL;
}
738
739 /* Class for diagnostics relating to malloc_state_machine. */
740
class malloc_diagnostic : public pending_diagnostic
{
public:
  malloc_diagnostic (const malloc_state_machine &sm, tree arg)
  : m_sm (sm), m_arg (arg)
  {}

  /* Two malloc diagnostics (of the same subclass) are equal iff they
     concern the same tree.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
  }

  /* Describe the state transitions common to all malloc diagnostics:
     allocation, assuming non-NULL, and becoming (or being assumed) NULL.
     Subclasses extend this for their specific transitions.  */
  label_text describe_state_change (const evdesc::state_change &change)
    override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      // TODO: verify that it's the allocation stmt, not a copy
      return label_text::borrow ("allocated here");
    if (unchecked_p (change.m_old_state)
	&& nonnull_p (change.m_new_state))
      {
	if (change.m_expr)
	  return change.formatted_print ("assuming %qE is non-NULL",
					 change.m_expr);
	else
	  return change.formatted_print ("assuming %qs is non-NULL",
					 "<unknown>");
      }
    if (change.m_new_state == m_sm.m_null)
      {
	/* "assuming" wording when NULL-ness was merely unproven;
	   plain wording when it is established.  */
	if (unchecked_p (change.m_old_state))
	  {
	    if (change.m_expr)
	      return change.formatted_print ("assuming %qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("assuming %qs is NULL",
					     "<unknown>");
	  }
	else
	  {
	    if (change.m_expr)
	      return change.formatted_print ("%qE is NULL",
					     change.m_expr);
	    else
	      return change.formatted_print ("%qs is NULL",
					     "<unknown>");
	  }
      }

    return label_text ();
  }

  /* Map state changes to event meanings: allocation acquires memory,
     deallocation releases it.  */
  diagnostic_event::meaning
  get_meaning_for_state_change (const evdesc::state_change &change)
    const final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_acquire,
					diagnostic_event::NOUN_memory);
    if (freed_p (change.m_new_state))
      return diagnostic_event::meaning (diagnostic_event::VERB_release,
					diagnostic_event::NOUN_memory);
    return diagnostic_event::meaning ();
  }

protected:
  /* The state machine this diagnostic relates to.  */
  const malloc_state_machine &m_sm;
  /* The tree (e.g. a pointer expression) the diagnostic is about.  */
  tree m_arg;
};
813
814 /* Concrete subclass for reporting mismatching allocator/deallocator
815 diagnostics. */
816
class mismatching_deallocation : public malloc_diagnostic
{
public:
  mismatching_deallocation (const malloc_state_machine &sm, tree arg,
			    const deallocator_set *expected_deallocators,
			    const deallocator *actual_dealloc)
  : malloc_diagnostic (sm, arg),
    m_expected_deallocators (expected_deallocators),
    m_actual_dealloc (actual_dealloc)
  {}

  const char *get_kind () const final override
  {
    return "mismatching_deallocation";
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_mismatching_deallocation;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines.  */
    /* Use more specific wording when there's exactly one valid
       deallocator for the allocation.  */
    if (const deallocator *expected_dealloc
	  = m_expected_deallocators->maybe_get_single ())
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "%qE should have been deallocated with %qs"
			   " but was deallocated with %qs",
			   m_arg, expected_dealloc->m_name,
			   m_actual_dealloc->m_name);
    else
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "%qs called on %qE returned from a mismatched"
			   " allocation function",
			   m_actual_dealloc->m_name, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state))
      {
	/* Remember the allocation event so describe_final_event can
	   refer back to it via %@.  */
	m_alloc_event = change.m_event_id;
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return change.formatted_print ("allocated here"
					 " (expects deallocation with %qs)",
					 expected_dealloc->m_name);
	else
	  return change.formatted_print ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_alloc_event.known_p ())
      {
	if (const deallocator *expected_dealloc
	      = m_expected_deallocators->maybe_get_single ())
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocation at %@ expects deallocation with %qs",
	     m_actual_dealloc->m_name, &m_alloc_event,
	     expected_dealloc->m_name);
	else
	  return ev.formatted_print
	    ("deallocated with %qs here;"
	     " allocated at %@",
	     m_actual_dealloc->m_name, &m_alloc_event);
      }
    return ev.formatted_print ("deallocated with %qs here",
			       m_actual_dealloc->m_name);
  }

private:
  /* Event id of the allocation, for cross-referencing in messages.  */
  diagnostic_event_id_t m_alloc_event;
  /* The set of deallocators that would have been valid.  */
  const deallocator_set *m_expected_deallocators;
  /* The deallocator that was actually called.  */
  const deallocator *m_actual_dealloc;
};
900
901 /* Concrete subclass for reporting double-free diagnostics. */
902
class double_free : public malloc_diagnostic
{
public:
  double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
  : malloc_diagnostic (sm, arg), m_funcname (funcname)
  {}

  const char *get_kind () const final override { return "double_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_double_free;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (415); /* CWE-415: Double Free.  */
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "double-%qs of %qE", m_funcname, m_arg);
  }

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	/* Remember the first free so the final event can refer to it.  */
	m_first_free_event = change.m_event_id;
	return change.formatted_print ("first %qs here", m_funcname);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_call_with_state (const evdesc::call_with_state &info)
    final override
  {
    if (freed_p (info.m_state))
      return info.formatted_print
	("passing freed pointer %qE in call to %qE from %qE",
	 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_first_free_event.known_p ())
      return ev.formatted_print ("second %qs here; first %qs was at %@",
				 m_funcname, m_funcname,
				 &m_first_free_event);
    return ev.formatted_print ("second %qs here", m_funcname);
  }

private:
  /* Event id of the first deallocation, for cross-referencing.  */
  diagnostic_event_id_t m_first_free_event;
  /* Name of the deallocation function, e.g. "free".  */
  const char *m_funcname;
};
960
961 /* Abstract subclass for describing possible bad uses of NULL.
962 Responsible for describing the call that could return NULL. */
963
class possible_null : public malloc_diagnostic
{
public:
  possible_null (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg)
  {}

  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& unchecked_p (change.m_new_state))
      {
	/* Remember the event where the unchecked value originated, so
	   subclasses can refer back to it via %@.  */
	m_origin_of_unchecked_event = change.m_event_id;
	return label_text::borrow ("this call could return NULL");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (unchecked_p (info.m_state))
      return info.formatted_print ("possible return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

protected:
  /* Event id of the call that could have returned NULL.  */
  diagnostic_event_id_t m_origin_of_unchecked_event;
};
995
996 /* Concrete subclass for describing dereference of a possible NULL
997 value. */
998
class possible_null_deref : public possible_null
{
public:
  possible_null_deref (const malloc_state_machine &sm, tree arg)
  : possible_null (sm, arg)
  {}

  const char *get_kind () const final override { return "possible_null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_dereference;
  }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    diagnostic_metadata m;
    m.add_cwe (690);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "dereference of possibly-NULL %qE", m_arg);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    /* Point back to the originating call when its event id is known
       (recorded by possible_null::describe_state_change).  */
    if (m_origin_of_unchecked_event.known_p ())
      return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
				 ev.m_expr,
				 &m_origin_of_unchecked_event);
    else
      return ev.formatted_print ("%qE could be NULL", ev.m_expr);
  }

};
1033
1034 /* Return true if FNDECL is a C++ method. */
1035
1036 static bool
1037 method_p (tree fndecl)
1038 {
1039 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
1040 }
1041
1042 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
1043 Compare with %P in the C++ FE (implemented in cp/error.cc: parm_to_string
1044 as called from cp_printer). */
1045
1046 static label_text
1047 describe_argument_index (tree fndecl, int arg_idx)
1048 {
1049 if (method_p (fndecl))
1050 if (arg_idx == 0)
1051 return label_text::borrow ("'this'");
1052 pretty_printer pp;
1053 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
1054 return label_text::take (xstrdup (pp_formatted_text (&pp)));
1055 }
1056
1057 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
1058 Issue a note informing that the pertinent argument must be non-NULL. */
1059
static void
inform_nonnull_attribute (tree fndecl, int arg_idx)
{
  /* Describe ARG_IDX in the same 1-based form used by the warnings.  */
  label_text arg_desc = describe_argument_index (fndecl, arg_idx);
  inform (DECL_SOURCE_LOCATION (fndecl),
	  "argument %s of %qD must be non-null",
	  arg_desc.get (), fndecl);
  /* Ideally we would use the location of the parm and underline the
     attribute also - but we don't have the location_t values at this point
     in the middle-end.
     For reference, the C and C++ FEs have get_fndecl_argument_location.  */
}
1072
/* Concrete subclass for describing passing a possibly-NULL value to a
   function marked with __attribute__((nonnull)).  */

class possible_null_arg : public possible_null
{
public:
  /* ARG is the possibly-NULL value; FNDECL is the callee and ARG_IDX the
     0-based index of the argument within the call.  */
  possible_null_arg (const malloc_state_machine &sm, tree arg,
		     tree fndecl, int arg_idx)
  : possible_null (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "possible_null_arg"; }

  /* Two instances are "equal" (for diagnostic deduplication) iff they
     refer to the same argument tree, callee, and argument index.  */
  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const possible_null_arg &sub_other
      = (const possible_null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_possible_null_argument;
  }

  /* Emit the warning; on success also emit a note pointing at the
     callee's nonnull requirement.  */
  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-690: Unchecked Return Value to NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (690);
    bool warned
      = warning_meta (rich_loc, m, get_controlling_option (),
		      "use of possibly-NULL %qE where non-null expected",
		      m_arg);
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  /* Describe the final event, citing the event where the unchecked
     value originated, if known (recorded by the possible_null base).  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    if (m_origin_of_unchecked_event.known_p ())
      result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr,
				   &m_origin_of_unchecked_event);
    else
      result = ev.formatted_print ("argument %s (%qE) could be NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;   /* The callee with the nonnull attribute.  */
  int m_arg_idx;   /* 0-based index of the offending argument.  */
};
1137
/* Concrete subclass for describing a dereference of a NULL value.  */

class null_deref : public malloc_diagnostic
{
public:
  null_deref (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "null_deref"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_dereference;
  }

  /* A definite NULL dereference terminates the analysis path.  */
  bool terminate_path_p () const final override { return true; }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    diagnostic_metadata m;
    m.add_cwe (476);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "dereference of NULL %qE", m_arg);
  }

  /* Describe interprocedural returns of the NULL value; other state
     returns get no custom wording.  */
  label_text describe_return_of_state (const evdesc::return_of_state &info)
    final override
  {
    if (info.m_state == m_sm.m_null)
      return info.formatted_print ("return of NULL to %qE from %qE",
				   info.m_caller_fndecl, info.m_callee_fndecl);
    return label_text ();
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
  }
};
1178
/* Concrete subclass for describing passing a NULL value to a
   function marked with __attribute__((nonnull)).  */

class null_arg : public malloc_diagnostic
{
public:
  /* ARG is the NULL value; FNDECL is the callee and ARG_IDX the 0-based
     index of the argument within the call.  */
  null_arg (const malloc_state_machine &sm, tree arg,
	    tree fndecl, int arg_idx)
  : malloc_diagnostic (sm, arg),
    m_fndecl (fndecl), m_arg_idx (arg_idx)
  {}

  const char *get_kind () const final override { return "null_arg"; }

  /* Deduplicate on (argument, callee, argument index).  */
  bool subclass_equal_p (const pending_diagnostic &base_other)
    const final override
  {
    const null_arg &sub_other
      = (const null_arg &)base_other;
    return (same_tree_p (m_arg, sub_other.m_arg)
	    && m_fndecl == sub_other.m_fndecl
	    && m_arg_idx == sub_other.m_arg_idx);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_null_argument;
  }

  /* Passing NULL where non-null is required terminates the path.  */
  bool terminate_path_p () const final override { return true; }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-476: NULL Pointer Dereference.  */
    auto_diagnostic_group d;
    diagnostic_metadata m;
    m.add_cwe (476);

    /* Use shorter wording when the argument is a literal zero, since
       quoting "0" adds nothing.  */
    bool warned;
    if (zerop (m_arg))
      warned = warning_meta (rich_loc, m, get_controlling_option (),
			     "use of NULL where non-null expected");
    else
      warned = warning_meta (rich_loc, m, get_controlling_option (),
			     "use of NULL %qE where non-null expected",
			     m_arg);
    if (warned)
      inform_nonnull_attribute (m_fndecl, m_arg_idx);
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
    label_text result;
    if (zerop (ev.m_expr))
      result = ev.formatted_print ("argument %s NULL where non-null expected",
				   arg_desc.get ());
    else
      result = ev.formatted_print ("argument %s (%qE) NULL"
				   " where non-null expected",
				   arg_desc.get (), ev.m_expr);
    return result;
  }

private:
  tree m_fndecl;   /* The callee with the nonnull attribute.  */
  int m_arg_idx;   /* 0-based index of the offending argument.  */
};
1248
/* Concrete subclass for describing a use of a pointer after it has been
   deallocated (by free/delete/a custom deallocator).  */

class use_after_free : public malloc_diagnostic
{
public:
  use_after_free (const malloc_state_machine &sm, tree arg,
		  const deallocator *deallocator)
  : malloc_diagnostic (sm, arg),
    m_deallocator (deallocator)
  {
    gcc_assert (deallocator);
  }

  const char *get_kind () const final override { return "use_after_free"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_use_after_free;
  }

  bool emit (rich_location *rich_loc) final override
  {
    /* CWE-416: Use After Free.  */
    diagnostic_metadata m;
    m.add_cwe (416);
    return warning_meta (rich_loc, m, get_controlling_option (),
			 "use after %<%s%> of %qE",
			 m_deallocator->m_name, m_arg);
  }

  /* Label the state change at which the pointer became freed, recording
     its event id for use in describe_final_event, with wording matched
     to the kind of deallocator.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (freed_p (change.m_new_state))
      {
	m_free_event = change.m_event_id;
	switch (m_deallocator->m_wording)
	  {
	  default:
	  case WORDING_REALLOCATED:
	    gcc_unreachable ();
	  case WORDING_FREED:
	    return label_text::borrow ("freed here");
	  case WORDING_DELETED:
	    return label_text::borrow ("deleted here");
	  case WORDING_DEALLOCATED:
	    return label_text::borrow ("deallocated here");
	  }
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *funcname = m_deallocator->m_name;
    if (m_free_event.known_p ())
      switch (m_deallocator->m_wording)
	{
	default:
	case WORDING_REALLOCATED:
	  gcc_unreachable ();
	case WORDING_FREED:
	  return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DELETED:
	  return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
				     funcname, ev.m_expr, &m_free_event);
	case WORDING_DEALLOCATED:
	  return ev.formatted_print ("use after %<%s%> of %qE;"
				     " deallocated at %@",
				     funcname, ev.m_expr, &m_free_event);
	}
    else
      return ev.formatted_print ("use after %<%s%> of %qE",
				 funcname, ev.m_expr);
  }

  /* Implementation of pending_diagnostic::supercedes_p for
     use_after_free.

     We want use-after-free to supercede use-of-uninitialized-value,
     so that if we have these at the same stmt, we don't emit
     a use-of-uninitialized, just the use-after-free.
     (this is because we fully purge information about freed
     buffers when we free them to avoid state explosions, so
     that if they are accessed after the free, it looks like
     they are uninitialized).  */

  bool supercedes_p (const pending_diagnostic &other) const final override
  {
    if (other.use_of_uninit_p ())
      return true;

    return false;
  }

private:
  diagnostic_event_id_t m_free_event;   /* Where the free happened.  */
  const deallocator *m_deallocator;     /* Which deallocator was used.  */
};
1347
/* Concrete subclass for describing a leak of an allocated buffer.
   ARG may be NULL_TREE when no expression is available for the
   leaked value, in which case "<unknown>" is printed.  */

class malloc_leak : public malloc_diagnostic
{
public:
  malloc_leak (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg) {}

  const char *get_kind () const final override { return "malloc_leak"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_malloc_leak;
  }

  bool emit (rich_location *rich_loc) final override
  {
    /* "CWE-401: Missing Release of Memory after Effective Lifetime".  */
    diagnostic_metadata m;
    m.add_cwe (401);
    if (m_arg)
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "leak of %qE", m_arg);
    else
      return warning_meta (rich_loc, m, get_controlling_option (),
			   "leak of %qs", "<unknown>");
  }

  /* Label the allocation site (either a transition to an unchecked
     state, or directly from start to nonnull, e.g. for allocators
     marked returns_nonnull), recording its event id.  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (unchecked_p (change.m_new_state)
	|| (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
      {
	m_alloc_event = change.m_event_id;
	return label_text::borrow ("allocated here");
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    if (ev.m_expr)
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qE leaks here; was allocated at %@",
				     ev.m_expr, &m_alloc_event);
	else
	  return ev.formatted_print ("%qE leaks here", ev.m_expr);
      }
    else
      {
	if (m_alloc_event.known_p ())
	  return ev.formatted_print ("%qs leaks here; was allocated at %@",
				     "<unknown>", &m_alloc_event);
	else
	  return ev.formatted_print ("%qs leaks here", "<unknown>");
      }
  }

private:
  diagnostic_event_id_t m_alloc_event;   /* Where the allocation happened.  */
};
1409
1410 class free_of_non_heap : public malloc_diagnostic
1411 {
1412 public:
1413 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1414 const region *freed_reg,
1415 const char *funcname)
1416 : malloc_diagnostic (sm, arg), m_freed_reg (freed_reg), m_funcname (funcname)
1417 {
1418 }
1419
1420 const char *get_kind () const final override { return "free_of_non_heap"; }
1421
1422 bool subclass_equal_p (const pending_diagnostic &base_other) const
1423 final override
1424 {
1425 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1426 return (same_tree_p (m_arg, other.m_arg)
1427 && m_freed_reg == other.m_freed_reg);
1428 }
1429
1430 int get_controlling_option () const final override
1431 {
1432 return OPT_Wanalyzer_free_of_non_heap;
1433 }
1434
1435 bool emit (rich_location *rich_loc) final override
1436 {
1437 auto_diagnostic_group d;
1438 diagnostic_metadata m;
1439 m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1440 switch (get_memory_space ())
1441 {
1442 default:
1443 case MEMSPACE_HEAP:
1444 gcc_unreachable ();
1445 case MEMSPACE_UNKNOWN:
1446 case MEMSPACE_CODE:
1447 case MEMSPACE_GLOBALS:
1448 case MEMSPACE_READONLY_DATA:
1449 return warning_meta (rich_loc, m, get_controlling_option (),
1450 "%<%s%> of %qE which points to memory"
1451 " not on the heap",
1452 m_funcname, m_arg);
1453 break;
1454 case MEMSPACE_STACK:
1455 return warning_meta (rich_loc, m, get_controlling_option (),
1456 "%<%s%> of %qE which points to memory"
1457 " on the stack",
1458 m_funcname, m_arg);
1459 break;
1460 }
1461 }
1462
1463 label_text describe_state_change (const evdesc::state_change &)
1464 final override
1465 {
1466 return label_text::borrow ("pointer is from here");
1467 }
1468
1469 label_text describe_final_event (const evdesc::final_event &ev) final override
1470 {
1471 return ev.formatted_print ("call to %qs here", m_funcname);
1472 }
1473
1474 void mark_interesting_stuff (interesting_t *interest) final override
1475 {
1476 if (m_freed_reg)
1477 interest->add_region_creation (m_freed_reg);
1478 }
1479
1480 private:
1481 enum memory_space get_memory_space () const
1482 {
1483 if (m_freed_reg)
1484 return m_freed_reg->get_memory_space ();
1485 else
1486 return MEMSPACE_UNKNOWN;
1487 }
1488
1489 const region *m_freed_reg;
1490 const char *m_funcname;
1491 };
1492
/* Concrete pending_diagnostic subclass for -Wanalyzer-deref-before-check:
   complains when a pointer is checked against NULL *after* it has
   already been dereferenced (so the check is either redundant or the
   deref is a bug).  */

class deref_before_check : public malloc_diagnostic
{
public:
  deref_before_check (const malloc_state_machine &sm, tree arg)
  : malloc_diagnostic (sm, arg),
    m_deref_enode (NULL),
    m_deref_expr (NULL),
    m_check_enode (NULL)
  {
    gcc_assert (arg);
  }

  const char *get_kind () const final override { return "deref_before_check"; }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_deref_before_check;
  }

  /* This emit applies several rejection heuristics to suppress false
     positives before issuing the warning.  */
  bool emit (rich_location *rich_loc) final override
  {
    /* Don't emit the warning if we can't show where the deref
       and the check occur.  */
    if (!m_deref_enode)
      return false;
    if (!m_check_enode)
      return false;
    /* Only emit the warning for intraprocedural cases.  */
    if (m_deref_enode->get_function () != m_check_enode->get_function ())
      return false;
    if (&m_deref_enode->get_point ().get_call_string ()
	!= &m_check_enode->get_point ().get_call_string ())
      return false;

    /* Reject the warning if the check occurs within a macro definition.
       This avoids false positives for such code as:

	#define throw_error \
	   do {             \
	     if (p)         \
	       cleanup (p); \
	     return;        \
	   } while (0)

	if (p->idx >= n)
	  throw_error ();

       where the usage of "throw_error" implicitly adds a check
       on 'p'.

       We do warn when the check is in a macro expansion if we can get
       at the location of the condition and it isn't part of the
       definition, so that we warn for checks such as:
	   if (words[0][0] == '@')
	     return;
	   g_assert(words[0] != NULL); <--- here
       Unfortunately we don't have locations for individual gimple
       arguments, so in:
	   g_assert (ptr);
       we merely have a gimple_cond
	   if (p_2(D) == 0B)
       with no way of getting at the location of the condition separately
       from that of the gimple_cond (where the "if" is within the macro
       definition).  We reject the warning for such cases.

       We do warn when the *deref* occurs in a macro, since this can be
       a source of real bugs; see e.g. PR 77425.  */
    location_t check_loc = m_check_enode->get_point ().get_location ();
    if (linemap_location_from_macro_definition_p (line_table, check_loc))
      return false;

    /* Reject if m_deref_expr is sufficiently different from m_arg
       for cases where the dereference is spelled differently from
       the check, which is probably two different ways to get the
       same svalue, and thus not worth reporting.  */
    if (!m_deref_expr)
      return false;
    if (!sufficiently_similar_p (m_deref_expr, m_arg))
      return false;

    /* Reject the warning if the deref's BB doesn't dominate that
       of the check, so that we don't warn e.g. for shared cleanup
       code that checks a pointer for NULL, when that code is sometimes
       used before a deref and sometimes after.
       Using the dominance code requires setting cfun.  */
    auto_cfun sentinel (m_deref_enode->get_function ());
    calculate_dominance_info (CDI_DOMINATORS);
    if (!dominated_by_p (CDI_DOMINATORS,
			 m_check_enode->get_supernode ()->m_bb,
			 m_deref_enode->get_supernode ()->m_bb))
      return false;

    return warning_at (rich_loc, get_controlling_option (),
		       "check of %qE for NULL after already"
		       " dereferencing it",
		       m_arg);
  }

  /* Record the event/enode/expr of the first dereference (the
     transition from the start state to "assumed-non-null").  */
  label_text describe_state_change (const evdesc::state_change &change)
    final override
  {
    if (change.m_old_state == m_sm.get_start_state ()
	&& assumed_non_null_p (change.m_new_state))
      {
	m_first_deref_event = change.m_event_id;
	m_deref_enode = change.m_event.get_exploded_node ();
	m_deref_expr = change.m_expr;
	return change.formatted_print ("pointer %qE is dereferenced here",
				       m_arg);
      }
    return malloc_diagnostic::describe_state_change (change);
  }

  /* The final event is the check itself; record its enode for the
     heuristics in emit above.  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    m_check_enode = ev.m_event.get_exploded_node ();
    if (m_first_deref_event.known_p ())
      return ev.formatted_print ("pointer %qE is checked for NULL here but"
				 " it was already dereferenced at %@",
				 m_arg, &m_first_deref_event);
    else
      return ev.formatted_print ("pointer %qE is checked for NULL here but"
				 " it was already dereferenced",
				 m_arg);
  }

private:
  /* Return true if EXPR_A and EXPR_B print identically with %qE,
     i.e. the deref and the check are spelled the same way.  */
  static bool sufficiently_similar_p (tree expr_a, tree expr_b)
  {
    pretty_printer *pp_a = global_dc->printer->clone ();
    pretty_printer *pp_b = global_dc->printer->clone ();
    pp_printf (pp_a, "%qE", expr_a);
    pp_printf (pp_b, "%qE", expr_b);
    bool result = (strcmp (pp_formatted_text (pp_a), pp_formatted_text (pp_b))
		   == 0);
    delete pp_a;
    delete pp_b;
    return result;
  }

  diagnostic_event_id_t m_first_deref_event;  /* Event of the deref.  */
  const exploded_node *m_deref_enode;         /* Enode of the deref.  */
  tree m_deref_expr;                          /* Expr as dereferenced.  */
  const exploded_node *m_check_enode;         /* Enode of the check.  */
};
1640
1641 /* struct allocation_state : public state_machine::state. */
1642
1643 /* Implementation of state_machine::state::dump_to_pp vfunc
1644 for allocation_state: append the API that this allocation is
1645 associated with. */
1646
1647 void
1648 allocation_state::dump_to_pp (pretty_printer *pp) const
1649 {
1650 state_machine::state::dump_to_pp (pp);
1651 if (m_deallocators)
1652 {
1653 pp_string (pp, " (");
1654 m_deallocators->dump_to_pp (pp);
1655 pp_character (pp, ')');
1656 }
1657 }
1658
1659 /* Given a allocation_state for a deallocator_set, get the "nonnull" state
1660 for the corresponding allocator(s). */
1661
1662 const allocation_state *
1663 allocation_state::get_nonnull () const
1664 {
1665 gcc_assert (m_deallocators);
1666 return as_a_allocation_state (m_deallocators->m_nonnull);
1667 }
1668
1669 /* struct assumed_non_null_state : public allocation_state. */
1670
1671 void
1672 assumed_non_null_state::dump_to_pp (pretty_printer *pp) const
1673 {
1674 allocation_state::dump_to_pp (pp);
1675 pp_string (pp, " (in ");
1676 m_frame->dump_to_pp (pp, true);
1677 pp_character (pp, ')');
1678 }
1679
/* malloc_state_machine's ctor.  Sets up the built-in deallocator_sets
   (free, scalar/vector delete, realloc) and the fixed states.  */

malloc_state_machine::malloc_state_machine (logger *logger)
: state_machine ("malloc", logger),
  m_free (this, "free", WORDING_FREED),
  m_scalar_delete (this, "delete", WORDING_DELETED),
  m_vector_delete (this, "delete[]", WORDING_DELETED),
  m_realloc (this, "realloc", WORDING_REALLOCATED)
{
  /* The start state must have been allocated id 0 before any of the
     states created below.  */
  gcc_assert (m_start->get_id () == 0);
  m_null = add_state ("null", RS_FREED, NULL, NULL);
  m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
  m_stop = add_state ("stop", RS_STOP, NULL, NULL);
}
1694
/* malloc_state_machine's dtor: delete the dynamically-created
   deallocator sets and deallocators owned by this state machine
   (created when handling __attribute__((malloc(FOO)))).  */

malloc_state_machine::~malloc_state_machine ()
{
  unsigned i;
  custom_deallocator_set *set;
  FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
    delete set;
  custom_deallocator *d;
  FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
    delete d;
}
1705
1706 state_machine::state_t
1707 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1708 const deallocator_set *deallocators,
1709 const deallocator *deallocator)
1710 {
1711 return add_custom_state (new allocation_state (name, alloc_state_id (),
1712 rs, deallocators,
1713 deallocator));
1714 }
1715
1716 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1717 return a custom_deallocator_set for them, consolidating them
1718 to ensure uniqueness of the sets.
1719
1720 Return NULL if it has no such attributes. */
1721
1722 const custom_deallocator_set *
1723 malloc_state_machine::
1724 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1725 {
1726 /* Early rejection of decls without attributes. */
1727 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1728 if (!attrs)
1729 return NULL;
1730
1731 /* Otherwise, call maybe_create_custom_deallocator_set,
1732 memoizing the result. */
1733 if (custom_deallocator_set **slot
1734 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1735 return *slot;
1736 custom_deallocator_set *set
1737 = maybe_create_custom_deallocator_set (allocator_fndecl);
1738 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1739 return set;
1740 }
1741
/* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
   look for any "__attribute__((malloc(FOO)))" and return a
   custom_deallocator_set for them, consolidating them
   to ensure uniqueness of the sets.

   Return NULL if it has no such attributes.

   Subroutine of get_or_create_custom_deallocator_set which
   memoizes the result.  */

custom_deallocator_set *
malloc_state_machine::
maybe_create_custom_deallocator_set (tree allocator_fndecl)
{
  tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
  gcc_assert (attrs);

  /* Look for instances of __attribute__((malloc(FOO))).  */
  auto_vec<const deallocator *> deallocator_vec;
  for (tree allocs = attrs;
       (allocs = lookup_attribute ("malloc", allocs));
       allocs = TREE_CHAIN (allocs))
    {
      tree args = TREE_VALUE (allocs);
      /* Plain "malloc" attributes (no argument) don't name a
	 deallocator; skip them.  */
      if (!args)
	continue;
      if (TREE_VALUE (args))
	{
	  const deallocator *d
	    = get_or_create_deallocator (TREE_VALUE (args));
	  deallocator_vec.safe_push (d);
	}
    }

  /* If there weren't any deallocators, bail.  */
  if (deallocator_vec.length () == 0)
    return NULL;

  /* Consolidate, so that we reuse existing deallocator_set
     instances.  */
  deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
  custom_deallocator_set **slot
    = m_custom_deallocator_set_map.get (&deallocator_vec);
  if (slot)
    return *slot;
  custom_deallocator_set *set
    = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
  /* Key the map by the set's own copy of the vec, which (unlike the
     local deallocator_vec) lives as long as the set itself.  */
  m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
  m_dynamic_sets.safe_push (set);
  return set;
}
1793
1794 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1795
1796 const deallocator *
1797 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1798 {
1799 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1800 if (slot)
1801 return *slot;
1802
1803 /* Reuse "free". */
1804 deallocator *d;
1805 if (is_named_call_p (deallocator_fndecl, "free")
1806 || is_std_named_call_p (deallocator_fndecl, "free")
1807 || is_named_call_p (deallocator_fndecl, "__builtin_free"))
1808 d = &m_free.m_deallocator;
1809 else
1810 {
1811 custom_deallocator *cd
1812 = new custom_deallocator (this, deallocator_fndecl,
1813 WORDING_DEALLOCATED);
1814 m_dynamic_deallocators.safe_push (cd);
1815 d = cd;
1816 }
1817 m_deallocator_map.put (deallocator_fndecl, d);
1818 return d;
1819 }
1820
1821 /* Get the "assumed-non-null" state for assumptions made within FRAME,
1822 creating it if necessary. */
1823
1824 state_machine::state_t
1825 malloc_state_machine::
1826 get_or_create_assumed_non_null_state_for_frame (const frame_region *frame)
1827 {
1828 if (state_t *slot = m_assumed_non_null.get (frame))
1829 return *slot;
1830 state_machine::state *new_state
1831 = new assumed_non_null_state ("assumed-non-null", alloc_state_id (), frame);
1832 add_custom_state (new_state);
1833 m_assumed_non_null.put (frame, new_state);
1834 return new_state;
1835 }
1836
1837 /* Try to identify the function declaration either by name or as a known malloc
1838 builtin. */
1839
1840 static bool
1841 known_allocator_p (const_tree fndecl, const gcall *call)
1842 {
1843 /* Either it is a function we know by name and number of arguments... */
1844 if (is_named_call_p (fndecl, "malloc", call, 1)
1845 || is_named_call_p (fndecl, "calloc", call, 2)
1846 || is_std_named_call_p (fndecl, "malloc", call, 1)
1847 || is_std_named_call_p (fndecl, "calloc", call, 2)
1848 || is_named_call_p (fndecl, "strdup", call, 1)
1849 || is_named_call_p (fndecl, "strndup", call, 2))
1850 return true;
1851
1852 /* ... or it is a builtin allocator that allocates objects freed with
1853 __builtin_free. */
1854 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
1855 switch (DECL_FUNCTION_CODE (fndecl))
1856 {
1857 case BUILT_IN_MALLOC:
1858 case BUILT_IN_CALLOC:
1859 case BUILT_IN_STRDUP:
1860 case BUILT_IN_STRNDUP:
1861 return true;
1862 default:
1863 break;
1864 }
1865
1866 return false;
1867 }
1868
1869 /* If PTR's nullness is not known, transition it to the "assumed-non-null"
1870 state for the current frame. */
1871
1872 void
1873 malloc_state_machine::maybe_assume_non_null (sm_context *sm_ctxt,
1874 tree ptr,
1875 const gimple *stmt) const
1876 {
1877 const region_model *old_model = sm_ctxt->get_old_region_model ();
1878 if (!old_model)
1879 return;
1880
1881 tree null_ptr_cst = build_int_cst (TREE_TYPE (ptr), 0);
1882 tristate known_non_null
1883 = old_model->eval_condition (ptr, NE_EXPR, null_ptr_cst, NULL);
1884 if (known_non_null.is_unknown ())
1885 {
1886 /* Cast away const-ness for cache-like operations. */
1887 malloc_state_machine *mut_this
1888 = const_cast <malloc_state_machine *> (this);
1889 state_t next_state
1890 = mut_this->get_or_create_assumed_non_null_state_for_frame
1891 (old_model->get_current_frame ());
1892 sm_ctxt->set_next_state (stmt, ptr, next_state);
1893 }
1894 }
1895
/* Implementation of state_machine::on_stmt vfunc for malloc_state_machine.
   Dispatches on the kind of statement: allocator/deallocator calls,
   nonnull-attribute checks, comparisons against NULL, zero-assignments,
   and dereferences.  Returns true if the stmt was fully handled here.  */

bool
malloc_state_machine::on_stmt (sm_context *sm_ctxt,
			       const supernode *node,
			       const gimple *stmt) const
{
  if (const gcall *call = dyn_cast <const gcall *> (stmt))
    if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
      {
	/* malloc/calloc/strdup/strndup and the corresponding builtins.  */
	if (known_allocator_p (callee_fndecl, call))
	  {
	    on_allocator_call (sm_ctxt, call, &m_free);
	    return true;
	  }

	/* C++ operator new/delete, in scalar and vector forms.  */
	if (is_named_call_p (callee_fndecl, "operator new", call, 1))
	  on_allocator_call (sm_ctxt, call, &m_scalar_delete);
	else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
	  on_allocator_call (sm_ctxt, call, &m_vector_delete);
	else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
		 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
	  {
	    on_deallocator_call (sm_ctxt, node, call,
				 &m_scalar_delete.m_deallocator, 0);
	    return true;
	  }
	else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
	  {
	    on_deallocator_call (sm_ctxt, node, call,
				 &m_vector_delete.m_deallocator, 0);
	    return true;
	  }

	/* alloca results are stack memory: transition to non-heap so
	   that freeing them is diagnosed.  */
	if (is_named_call_p (callee_fndecl, "alloca", call, 1)
	    || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
	  {
	    tree lhs = gimple_call_lhs (call);
	    if (lhs)
	      sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
	    return true;
	  }

	if (is_named_call_p (callee_fndecl, "free", call, 1)
	    || is_std_named_call_p (callee_fndecl, "free", call, 1)
	    || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
	  {
	    on_deallocator_call (sm_ctxt, node, call,
				 &m_free.m_deallocator, 0);
	    return true;
	  }

	if (is_named_call_p (callee_fndecl, "realloc", call, 2)
	    || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
	  {
	    on_realloc_call (sm_ctxt, node, call);
	    return true;
	  }

	if (unaffected_by_call_p (callee_fndecl))
	  return true;

	/* Cast away const-ness for cache-like operations.  */
	malloc_state_machine *mutable_this
	  = const_cast <malloc_state_machine *> (this);

	/* Handle "__attribute__((malloc(FOO)))".  */
	if (const deallocator_set *deallocators
	      = mutable_this->get_or_create_custom_deallocator_set
		  (callee_fndecl))
	  {
	    tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
	    bool returns_nonnull
	      = lookup_attribute ("returns_nonnull", attrs);
	    on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
	  }

	/* Handle "__attribute__((nonnull))".  */
	{
	  tree fntype = TREE_TYPE (callee_fndecl);
	  bitmap nonnull_args = get_nonnull_args (fntype);
	  if (nonnull_args)
	    {
	      for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
		    continue;
		  /* If we have a nonnull-args, and either all pointers, or just
		     the specified pointers.  */
		  if (bitmap_empty_p (nonnull_args)
		      || bitmap_bit_p (nonnull_args, i))
		    {
		      state_t state = sm_ctxt->get_state (stmt, arg);
		      /* Can't use a switch as the states are non-const.  */
		      /* Possibly-NULL arg: warn, then treat it as checked
			 (nonnull) from here on.  */
		      if (unchecked_p (state))
			{
			  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
			  sm_ctxt->warn (node, stmt, arg,
					 make_unique<possible_null_arg>
					   (*this, diag_arg, callee_fndecl, i));
			  const allocation_state *astate
			    = as_a_allocation_state (state);
			  sm_ctxt->set_next_state (stmt, arg,
						   astate->get_nonnull ());
			}
		      /* Definitely-NULL arg: warn and stop tracking.  */
		      else if (state == m_null)
			{
			  tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
			  sm_ctxt->warn (node, stmt, arg,
					 make_unique<null_arg>
					   (*this, diag_arg, callee_fndecl, i));
			  sm_ctxt->set_next_state (stmt, arg, m_stop);
			}
		      /* Unknown arg: record the implied non-null
			 assumption for deref-before-check detection.  */
		      else if (state == m_start)
			maybe_assume_non_null (sm_ctxt, arg, stmt);
		    }
		}
	      BITMAP_FREE (nonnull_args);
	    }
	}

	/* Check for this after nonnull, so that if we have both
	   then we transition to "freed", rather than "checked".  */
	unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
	if (dealloc_argno != UINT_MAX)
	  {
	    const deallocator *d
	      = mutable_this->get_or_create_deallocator (callee_fndecl);
	    on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
	  }
      }

  /* Look for pointers explicitly being compared against zero
     that are in state assumed_non_null i.e. we already dereferenced
     them.
     We have to do this check here, rather than in on_condition
     because we add a constraint that the pointer is non-null when
     dereferencing it, and this makes the apply_constraints_for_gcond
     find known-true and known-false conditions; on_condition is only
     called when adding new constraints.  */
  if (const gcond *cond_stmt = dyn_cast <const gcond *> (stmt))
    {
      enum tree_code op = gimple_cond_code (cond_stmt);
      if (op == EQ_EXPR || op == NE_EXPR)
	{
	  tree lhs = gimple_cond_lhs (cond_stmt);
	  tree rhs = gimple_cond_rhs (cond_stmt);
	  if (any_pointer_p (lhs)
	      && any_pointer_p (rhs)
	      && zerop (rhs))
	    {
	      state_t state = sm_ctxt->get_state (stmt, lhs);
	      if (assumed_non_null_p (state))
		maybe_complain_about_deref_before_check
		  (sm_ctxt, node,
		   stmt,
		   (const assumed_non_null_state *)state,
		   lhs);
	    }
	}
    }

  /* "ptr = 0;" makes the pointer definitely null.  */
  if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
    if (any_pointer_p (lhs))
      on_zero_assignment (sm_ctxt, stmt,lhs);

  /* Handle dereferences.  */
  for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
    {
      tree op = gimple_op (stmt, i);
      if (!op)
	continue;
      /* For p->field, check the pointer p itself.  */
      if (TREE_CODE (op) == COMPONENT_REF)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) == MEM_REF)
	{
	  tree arg = TREE_OPERAND (op, 0);

	  state_t state = sm_ctxt->get_state (stmt, arg);
	  /* Unknown pointer: record the implied non-null assumption.  */
	  if (state == m_start)
	    maybe_assume_non_null (sm_ctxt, arg, stmt);
	  /* Unchecked allocation result: possibly-NULL deref; after
	     warning, treat as nonnull to avoid duplicate reports.  */
	  else if (unchecked_p (state))
	    {
	      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	      sm_ctxt->warn (node, stmt, arg,
			     make_unique<possible_null_deref> (*this,
							       diag_arg));
	      const allocation_state *astate = as_a_allocation_state (state);
	      sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
	    }
	  /* Known-NULL deref: warn and stop tracking.  */
	  else if (state == m_null)
	    {
	      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	      sm_ctxt->warn (node, stmt, arg,
			     make_unique<null_deref> (*this, diag_arg));
	      sm_ctxt->set_next_state (stmt, arg, m_stop);
	    }
	  /* Deref after deallocation: use-after-free.  */
	  else if (freed_p (state))
	    {
	      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
	      const allocation_state *astate = as_a_allocation_state (state);
	      sm_ctxt->warn (node, stmt, arg,
			     make_unique<use_after_free>
			       (*this, diag_arg, astate->m_deallocator));
	      sm_ctxt->set_next_state (stmt, arg, m_stop);
	    }
	}
    }
  return false;
}
2108
/* Given a check against null of PTR in assumed-non-null state STATE,
   potentially add a deref_before_check warning to SM_CTXT.
   Either way, transition PTR to the "stop" state so that we don't
   report about it again.  */

void
malloc_state_machine::
maybe_complain_about_deref_before_check (sm_context *sm_ctxt,
                                         const supernode *node,
                                         const gimple *stmt,
                                         const assumed_non_null_state *state,
                                         tree ptr) const
{
  /* We need the old region model to compare stack frame depths;
     bail out quietly if it's unavailable.  */
  const region_model *model = sm_ctxt->get_old_region_model ();
  if (!model)
    return;

  /* Don't complain if the current frame (where the check is occurring) is
     deeper than the frame in which the "not null" assumption was made.
     This suppress false positives for cases like:

     void foo (struct s *p)
     {
       int val = s->some_field; // deref here
       shared_helper (p);
     }

     where "shared_helper" has:

     void shared_helper (struct s *p)
     {
       if (!p) // check here
         return;
       // etc
     }

     since the check in "shared_helper" is OK.  */
  const frame_region *checked_in_frame = model->get_current_frame ();
  const frame_region *assumed_nonnull_in_frame = state->m_frame;
  /* Frame indices grow with call depth, so "greater" means "deeper".  */
  if (checked_in_frame->get_index () > assumed_nonnull_in_frame->get_index ())
    return;

  /* Only warn if we can express PTR as a tree for the diagnostic;
     stop tracking it in either case.  */
  tree diag_ptr = sm_ctxt->get_diagnostic_tree (ptr);
  if (diag_ptr)
    sm_ctxt->warn
      (node, stmt, ptr,
       make_unique<deref_before_check> (*this, diag_ptr));
  sm_ctxt->set_next_state (stmt, ptr, m_stop);
}
2156
2157 /* Handle a call to an allocator.
2158 RETURNS_NONNULL is true if CALL is to a fndecl known to have
2159 __attribute__((returns_nonnull)). */
2160
2161 void
2162 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
2163 const gcall *call,
2164 const deallocator_set *deallocators,
2165 bool returns_nonnull) const
2166 {
2167 tree lhs = gimple_call_lhs (call);
2168 if (lhs)
2169 {
2170 if (sm_ctxt->get_state (call, lhs) == m_start)
2171 sm_ctxt->set_next_state (call, lhs,
2172 (returns_nonnull
2173 ? deallocators->m_nonnull
2174 : deallocators->m_unchecked));
2175 }
2176 else
2177 {
2178 /* TODO: report leak. */
2179 }
2180 }
2181
2182 /* Handle deallocations of non-heap pointers.
2183 non-heap -> stop, with warning. */
2184
2185 void
2186 malloc_state_machine::handle_free_of_non_heap (sm_context *sm_ctxt,
2187 const supernode *node,
2188 const gcall *call,
2189 tree arg,
2190 const deallocator *d) const
2191 {
2192 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
2193 const region *freed_reg = NULL;
2194 if (const program_state *old_state = sm_ctxt->get_old_program_state ())
2195 {
2196 const region_model *old_model = old_state->m_region_model;
2197 const svalue *ptr_sval = old_model->get_rvalue (arg, NULL);
2198 freed_reg = old_model->deref_rvalue (ptr_sval, arg, NULL);
2199 }
2200 sm_ctxt->warn (node, call, arg,
2201 make_unique<free_of_non_heap>
2202 (*this, diag_arg, freed_reg, d->m_name));
2203 sm_ctxt->set_next_state (call, arg, m_stop);
2204 }
2205
/* Handle a call to deallocator D, deallocating argument ARGNO of CALL.
   Transition the sm-state of that argument:
     start/assumed-non-null/unchecked/nonnull -> freed
       (warning about a mismatching deallocator en route)
     freed -> stop, with a double-free warning
     non-heap -> stop, with a free-of-non-heap warning
   "null" deliberately stays as-is (free of NULL is a no-op).  */

void
malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
                                           const supernode *node,
                                           const gcall *call,
                                           const deallocator *d,
                                           unsigned argno) const
{
  /* Be defensive about malformed/variadic calls with too few args.  */
  if (argno >= gimple_call_num_args (call))
    return;
  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt->get_state (call, arg);

  /* start/assumed_non_null/unchecked/nonnull -> freed.  */
  if (state == m_start || assumed_non_null_p (state))
    sm_ctxt->set_next_state (call, arg, d->m_freed);
  else if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      if (!astate->m_deallocators->contains_p (d))
        {
          /* Wrong allocator.  */
          tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
          sm_ctxt->warn (node, call, arg,
                         make_unique<mismatching_deallocation>
                           (*this, diag_arg,
                            astate->m_deallocators,
                            d));
        }
      /* Transition to "freed" even after a mismatch warning, so we
         can still detect later double-frees/uses of ARG.  */
      sm_ctxt->set_next_state (call, arg, d->m_freed);
    }

  /* Keep state "null" as-is, rather than transitioning to "freed";
     we don't want to complain about double-free of NULL.  */
  else if (state == d->m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
      sm_ctxt->warn (node, call, arg,
                     make_unique<double_free> (*this, diag_arg, d->m_name));
      sm_ctxt->set_next_state (call, arg, m_stop);
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
    }
}
2255
/* Handle a call to "realloc".
   Check for free of non-heap or mismatching allocators,
   transitioning to the "stop" state for such cases.

   Otherwise, kf_realloc::impl_call_post will later
   get called (which will handle other sm-state transitions
   when the state is bifurcated).  */

void
malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
                                       const supernode *node,
                                       const gcall *call) const
{
  /* realloc's pointer operand is always its first argument.  */
  const unsigned argno = 0;
  const deallocator *d = &m_realloc;

  tree arg = gimple_call_arg (call, argno);

  state_t state = sm_ctxt->get_state (call, arg);

  if (unchecked_p (state) || nonnull_p (state))
    {
      const allocation_state *astate = as_a_allocation_state (state);
      gcc_assert (astate->m_deallocators);
      /* realloc only pairs with malloc-family allocations, so check
         against "free"'s deallocator set.  */
      if (!astate->m_deallocators->contains_p (&m_free.m_deallocator))
        {
          /* Wrong allocator.  */
          tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
          sm_ctxt->warn (node, call, arg,
                         make_unique<mismatching_deallocation>
                           (*this, diag_arg,
                            astate->m_deallocators, d));
          sm_ctxt->set_next_state (call, arg, m_stop);
          /* Terminate the path: there's no point simulating execution
             beyond this misuse (avoids followup false positives).  */
          if (path_context *path_ctxt = sm_ctxt->get_path_context ())
            path_ctxt->terminate_path ();
        }
    }
  else if (state == m_free.m_deallocator.m_freed)
    {
      /* freed -> stop, with warning.  */
      tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
      sm_ctxt->warn (node, call, arg,
                     make_unique<double_free> (*this, diag_arg, "free"));
      sm_ctxt->set_next_state (call, arg, m_stop);
      if (path_context *path_ctxt = sm_ctxt->get_path_context ())
        path_ctxt->terminate_path ();
    }
  else if (state == m_non_heap)
    {
      /* non-heap -> stop, with warning.  */
      handle_free_of_non_heap (sm_ctxt, node, call, arg, d);
      if (path_context *path_ctxt = sm_ctxt->get_path_context ())
        path_ctxt->terminate_path ();
    }
}
2311
2312 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
2313
2314 void
2315 malloc_state_machine::on_phi (sm_context *sm_ctxt,
2316 const supernode *node ATTRIBUTE_UNUSED,
2317 const gphi *phi,
2318 tree rhs) const
2319 {
2320 if (zerop (rhs))
2321 {
2322 tree lhs = gimple_phi_result (phi);
2323 on_zero_assignment (sm_ctxt, phi, lhs);
2324 }
2325 }
2326
2327 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
2328 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
2329
2330 void
2331 malloc_state_machine::on_condition (sm_context *sm_ctxt,
2332 const supernode *node ATTRIBUTE_UNUSED,
2333 const gimple *stmt,
2334 const svalue *lhs,
2335 enum tree_code op,
2336 const svalue *rhs) const
2337 {
2338 if (!rhs->all_zeroes_p ())
2339 return;
2340
2341 if (!any_pointer_p (lhs))
2342 return;
2343 if (!any_pointer_p (rhs))
2344 return;
2345
2346 if (op == NE_EXPR)
2347 {
2348 log ("got 'ARG != 0' match");
2349 state_t s = sm_ctxt->get_state (stmt, lhs);
2350 if (unchecked_p (s))
2351 {
2352 const allocation_state *astate = as_a_allocation_state (s);
2353 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
2354 }
2355 }
2356 else if (op == EQ_EXPR)
2357 {
2358 log ("got 'ARG == 0' match");
2359 state_t s = sm_ctxt->get_state (stmt, lhs);
2360 if (unchecked_p (s))
2361 sm_ctxt->set_next_state (stmt, lhs, m_null);
2362 }
2363 }
2364
2365 /* Implementation of state_machine::on_pop_frame vfunc for malloc_state_machine.
2366 Clear any "assumed-non-null" state where the assumption happened in
2367 FRAME_REG. */
2368
2369 void
2370 malloc_state_machine::on_pop_frame (sm_state_map *smap,
2371 const frame_region *frame_reg) const
2372 {
2373 hash_set<const svalue *> svals_to_clear;
2374 for (auto kv : *smap)
2375 {
2376 const svalue *sval = kv.first;
2377 state_t state = kv.second.m_state;
2378 if (assumed_non_null_p (state))
2379 {
2380 const assumed_non_null_state *assumed_state
2381 = (const assumed_non_null_state *)state;
2382 if (frame_reg == assumed_state->m_frame)
2383 svals_to_clear.add (sval);
2384 }
2385 }
2386 for (auto sval : svals_to_clear)
2387 smap->clear_any_state (sval);
2388 }
2389
2390 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
2391 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
2392 (to avoid false leak reports). */
2393
2394 bool
2395 malloc_state_machine::can_purge_p (state_t s) const
2396 {
2397 enum resource_state rs = get_rs (s);
2398 return rs != RS_UNCHECKED && rs != RS_NONNULL;
2399 }
2400
2401 /* Implementation of state_machine::on_leak vfunc for malloc_state_machine
2402 (for complaining about leaks of pointers in state 'unchecked' and
2403 'nonnull'). */
2404
2405 std::unique_ptr<pending_diagnostic>
2406 malloc_state_machine::on_leak (tree var) const
2407 {
2408 return make_unique<malloc_leak> (*this, var);
2409 }
2410
2411 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
2412 for malloc_state_machine. */
2413
2414 bool
2415 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
2416 bool is_mutable) const
2417 {
2418 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
2419 unknown fn. */
2420 if (s == m_non_heap)
2421 return false;
2422
2423 /* Otherwise, pointers passed as non-const can be freed. */
2424 return is_mutable;
2425 }
2426
2427 /* Implementation of state_machine::maybe_get_merged_states_nonequal vfunc
2428 for malloc_state_machine.
2429
2430 Support discarding "assumed-non-null" states when merging with
2431 start state. */
2432
2433 state_machine::state_t
2434 malloc_state_machine::maybe_get_merged_states_nonequal (state_t state_a,
2435 state_t state_b) const
2436 {
2437 if (assumed_non_null_p (state_a) && state_b == m_start)
2438 return m_start;
2439 if (state_a == m_start && assumed_non_null_p (state_b))
2440 return m_start;
2441 return NULL;
2442 }
2443
2444 /* Return true if calls to FNDECL are known to not affect this sm-state. */
2445
2446 bool
2447 malloc_state_machine::unaffected_by_call_p (tree fndecl)
2448 {
2449 /* A set of functions that are known to not affect allocation
2450 status, even if we haven't fully modelled the rest of their
2451 behavior yet. */
2452 static const char * const funcnames[] = {
2453 /* This array must be kept sorted. */
2454 "strsep",
2455 };
2456 const size_t count = ARRAY_SIZE (funcnames);
2457 function_set fs (funcnames, count);
2458
2459 if (fs.contains_decl_p (fndecl))
2460 return true;
2461
2462 return false;
2463 }
2464
2465 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
2466 assign zero to LHS. */
2467
2468 void
2469 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
2470 const gimple *stmt,
2471 tree lhs) const
2472 {
2473 state_t s = sm_ctxt->get_state (stmt, lhs);
2474 enum resource_state rs = get_rs (s);
2475 if (rs == RS_START
2476 || rs == RS_UNCHECKED
2477 || rs == RS_NONNULL
2478 || rs == RS_FREED)
2479 sm_ctxt->set_next_state (stmt, lhs, m_null);
2480 }
2481
/* Special-case hook for handling realloc, for the "success with move to
   a new buffer" case, marking OLD_PTR_SVAL as freed and NEW_PTR_SVAL as
   non-null.

   This is similar to on_deallocator_call and on_allocator_call,
   but the checks happen in on_realloc_call, and by splitting the states.  */

void
malloc_state_machine::
on_realloc_with_move (region_model *model,
                      sm_state_map *smap,
                      const svalue *old_ptr_sval,
                      const svalue *new_ptr_sval,
                      const extrinsic_state &ext_state) const
{
  /* The old buffer has been released by realloc.  */
  smap->set_state (model, old_ptr_sval,
                   m_free.m_deallocator.m_freed,
                   NULL, ext_state);

  /* The returned pointer is known to be non-null in this
     "success with move" bifurcation.  Set after the old pointer so
     that "nonnull" wins if the two svalues happen to coincide.  */
  smap->set_state (model, new_ptr_sval,
                   m_free.m_nonnull,
                   NULL, ext_state);
}
2505
2506 } // anonymous namespace
2507
2508 /* Internal interface to this file. */
2509
2510 state_machine *
2511 make_malloc_state_machine (logger *logger)
2512 {
2513 return new malloc_state_machine (logger);
2514 }
2515
2516 /* Specialcase hook for handling realloc, for use by
2517 kf_realloc::impl_call_post::success_with_move::update_model. */
2518
2519 void
2520 region_model::on_realloc_with_move (const call_details &cd,
2521 const svalue *old_ptr_sval,
2522 const svalue *new_ptr_sval)
2523 {
2524 region_model_context *ctxt = cd.get_ctxt ();
2525 if (!ctxt)
2526 return;
2527 const extrinsic_state *ext_state = ctxt->get_ext_state ();
2528 if (!ext_state)
2529 return;
2530
2531 sm_state_map *smap;
2532 const state_machine *sm;
2533 unsigned sm_idx;
2534 if (!ctxt->get_malloc_map (&smap, &sm, &sm_idx))
2535 return;
2536
2537 gcc_assert (smap);
2538 gcc_assert (sm);
2539
2540 const malloc_state_machine &malloc_sm
2541 = (const malloc_state_machine &)*sm;
2542
2543 malloc_sm.on_realloc_with_move (this,
2544 smap,
2545 old_ptr_sval,
2546 new_ptr_sval,
2547 *ext_state);
2548 }
2549
2550 } // namespace ana
2551
2552 #endif /* #if ENABLE_ANALYZER */