1 /* A state machine for detecting misuses of the malloc/free API.
2 Copyright (C) 2019-2021 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "options.h"
29 #include "bitmap.h"
30 #include "diagnostic-path.h"
31 #include "diagnostic-metadata.h"
32 #include "function.h"
33 #include "json.h"
34 #include "analyzer/analyzer.h"
35 #include "diagnostic-event-id.h"
36 #include "analyzer/analyzer-logging.h"
37 #include "analyzer/sm.h"
38 #include "analyzer/pending-diagnostic.h"
39 #include "tristate.h"
40 #include "selftest.h"
41 #include "analyzer/call-string.h"
42 #include "analyzer/program-point.h"
43 #include "analyzer/store.h"
44 #include "analyzer/region-model.h"
45 #include "stringpool.h"
46 #include "attribs.h"
47 #include "analyzer/function-set.h"
48
49 #if ENABLE_ANALYZER
50
51 namespace ana {
52
53 namespace {
54
55 /* This state machine and its various support classes track allocations
56 and deallocations.
57
58 It has a few standard allocation/deallocation pairs (e.g. new/delete),
59 and also supports user-defined ones via
60 __attribute__ ((malloc(DEALLOCATOR))).
61
62 There can be more than one valid deallocator for a given allocator,
63 for example:
64 __attribute__ ((malloc (fclose)))
65 __attribute__ ((malloc (freopen, 3)))
66 FILE* fopen (const char*, const char*);
67 A deallocator_set represents a particular set of valid deallocators.
68
69 We track the expected deallocator_set for a value, but not the allocation
70 function - there could be more than one allocator per deallocator_set.
71 For example, there could be dozens of allocators for "free" beyond just
72 malloc e.g. calloc, xstrdup, etc. We don't want to explode the number
73 of states by tracking individual allocators in the exploded graph;
74 we merely want to track "this value expects to have 'free' called on it".
75 Perhaps we can reconstruct which allocator was used later, when emitting
76 the path, if it's necessary for precision of wording of diagnostics. */
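/* As an illustrative sketch (user-level code, not part of this file), a
   custom allocation API can be described to the analyzer like this:

     void release_buf (void *);

     __attribute__ ((malloc (release_buf)))
     void *acquire_buf (size_t);

   The value returned by acquire_buf gets a deallocator_set containing just
   "release_buf", and starts out in the "unchecked" state for that set.  */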
77
78 class deallocator;
79 class deallocator_set;
80 class malloc_state_machine;
81
82 /* An enum for discriminating between different kinds of allocation_state. */
83
84 enum resource_state
85 {
86 /* States that are independent of allocator/deallocator. */
87
88 /* The start state. */
89 RS_START,
90
91 /* State for a pointer that's known to be NULL. */
92 RS_NULL,
93
94 /* State for a pointer that's known to not be on the heap (e.g. to a local
95 or global). */
96 RS_NON_HEAP,
97
98 /* Stop state, for pointers we don't want to track any more. */
99 RS_STOP,
100
101 /* States that relate to a specific deallocator_set. */
102
103 /* State for a pointer returned from an allocator that hasn't
104 been checked for NULL.
105 It could be a pointer to heap-allocated memory, or could be NULL. */
106 RS_UNCHECKED,
107
108 /* State for a pointer returned from an allocator,
109 known to be non-NULL. */
110 RS_NONNULL,
111
112 /* State for a pointer passed to a deallocator. */
113 RS_FREED
114 };
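/* An illustrative sketch of how these states evolve for a typical caller
   (not from this file; see sm-malloc.dot for the authoritative diagram):

     void *p = malloc (n);      p: RS_UNCHECKED
     if (p == NULL)
       return;                  p: RS_NULL on this path
     ...                        p: RS_NONNULL after the check
     free (p);                  p: RS_FREED
     free (p);                  double-free warning; p moves to RS_STOP.  */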
115
116 /* Custom state subclass, which can optionally refer to a
117 deallocator_set. */
118
119 struct allocation_state : public state_machine::state
120 {
121 allocation_state (const char *name, unsigned id,
122 enum resource_state rs,
123 const deallocator_set *deallocators,
124 const deallocator *deallocator)
125 : state (name, id), m_rs (rs),
126 m_deallocators (deallocators),
127 m_deallocator (deallocator)
128 {}
129
130 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
131
132 const allocation_state *get_nonnull () const;
133
134 enum resource_state m_rs;
135 const deallocator_set *m_deallocators;
136 const deallocator *m_deallocator;
137 };
138
139 /* An enum for choosing which wording to use in various diagnostics
140 when describing deallocations. */
141
142 enum wording
143 {
144 WORDING_FREED,
145 WORDING_DELETED,
146 WORDING_DEALLOCATED,
147 WORDING_REALLOCATED
148 };
149
150 /* Base class representing a deallocation function,
151 either a built-in one we know about, or one exposed via
152 __attribute__((malloc(DEALLOCATOR))). */
153
154 struct deallocator
155 {
156 hashval_t hash () const;
157 void dump_to_pp (pretty_printer *pp) const;
158 static int cmp (const deallocator *a, const deallocator *b);
159 static int cmp_ptr_ptr (const void *, const void *);
160
161 /* Name to use in diagnostics. */
162 const char *m_name;
163
164 /* Which wording to use in diagnostics. */
165 enum wording m_wording;
166
167 /* State for a value passed to one of the deallocators. */
168 state_machine::state_t m_freed;
169
170 protected:
171 deallocator (malloc_state_machine *sm,
172 const char *name,
173 enum wording wording);
174 };
175
176 /* Subclass representing a predefined deallocator,
177 e.g. "delete []", without needing a specific FUNCTION_DECL
178 ahead of time. */
179
180 struct standard_deallocator : public deallocator
181 {
182 standard_deallocator (malloc_state_machine *sm,
183 const char *name,
184 enum wording wording);
185 };
186
187 /* Subclass representing a user-defined deallocator
188 via __attribute__((malloc(DEALLOCATOR))) given
189 a specific FUNCTION_DECL. */
190
191 struct custom_deallocator : public deallocator
192 {
193 custom_deallocator (malloc_state_machine *sm,
194 tree deallocator_fndecl,
195 enum wording wording)
196 : deallocator (sm, IDENTIFIER_POINTER (DECL_NAME (deallocator_fndecl)),
197 wording)
198 {
199 }
200 };
201
202 /* Base class representing a set of possible deallocators.
203 Often this will be just a single deallocator, but some
204 allocators have multiple valid deallocators (e.g. the result of
205 "fopen" can be closed by either "fclose" or "freopen"). */
206
207 struct deallocator_set
208 {
209 deallocator_set (malloc_state_machine *sm,
210 enum wording wording);
211 virtual ~deallocator_set () {}
212
213 virtual bool contains_p (const deallocator *d) const = 0;
214 virtual const deallocator *maybe_get_single () const = 0;
215 virtual void dump_to_pp (pretty_printer *pp) const = 0;
216 void dump () const;
217
218 /* Which wording to use in diagnostics. */
219 enum wording m_wording;
220
221 /* Pointers to states.
222 These states are owned by the state_machine base class. */
223
224 /* State for an unchecked result from an allocator using this set. */
225 state_machine::state_t m_unchecked;
226
227 /* State for a known non-NULL result from such an allocator. */
228 state_machine::state_t m_nonnull;
229 };
230
231 /* Subclass of deallocator_set representing a set of deallocators
232 defined by one or more __attribute__((malloc(DEALLOCATOR))). */
233
234 struct custom_deallocator_set : public deallocator_set
235 {
236 typedef const auto_vec <const deallocator *> *key_t;
237
238 custom_deallocator_set (malloc_state_machine *sm,
239 const auto_vec <const deallocator *> *vec,
240 //const char *name,
241 //const char *dealloc_funcname,
242 //unsigned arg_idx,
243 enum wording wording);
244
245 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
246 const deallocator *maybe_get_single () const FINAL OVERRIDE;
247 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
248
249 auto_vec <const deallocator *> m_deallocator_vec;
250 };
251
252 /* Subclass of deallocator_set representing a set of deallocators
253 with a single standard_deallocator, e.g. "delete []". */
254
255 struct standard_deallocator_set : public deallocator_set
256 {
257 standard_deallocator_set (malloc_state_machine *sm,
258 const char *name,
259 enum wording wording);
260
261 bool contains_p (const deallocator *d) const FINAL OVERRIDE;
262 const deallocator *maybe_get_single () const FINAL OVERRIDE;
263 void dump_to_pp (pretty_printer *pp) const FINAL OVERRIDE;
264
265 standard_deallocator m_deallocator;
266 };
267
268 /* Traits class for ensuring uniqueness of deallocator_sets within
269 malloc_state_machine. */
270
271 struct deallocator_set_map_traits
272 {
273 typedef custom_deallocator_set::key_t key_type;
274 typedef custom_deallocator_set *value_type;
275 typedef custom_deallocator_set *compare_type;
276
277 static inline hashval_t hash (const key_type &k)
278 {
279 gcc_assert (k != NULL);
280 gcc_assert (k != reinterpret_cast<key_type> (1));
281
282 hashval_t result = 0;
283 unsigned i;
284 const deallocator *d;
285 FOR_EACH_VEC_ELT (*k, i, d)
286 result ^= d->hash ();
287 return result;
288 }
289 static inline bool equal_keys (const key_type &k1, const key_type &k2)
290 {
291 if (k1->length () != k2->length ())
292 return false;
293
294 for (unsigned i = 0; i < k1->length (); i++)
295 if ((*k1)[i] != (*k2)[i])
296 return false;
297
298 return true;
299 }
300 template <typename T>
301 static inline void remove (T &)
302 {
303 /* empty; the nodes are handled elsewhere. */
304 }
305 template <typename T>
306 static inline void mark_deleted (T &entry)
307 {
308 entry.m_key = reinterpret_cast<key_type> (1);
309 }
310 template <typename T>
311 static inline void mark_empty (T &entry)
312 {
313 entry.m_key = NULL;
314 }
315 template <typename T>
316 static inline bool is_deleted (const T &entry)
317 {
318 return entry.m_key == reinterpret_cast<key_type> (1);
319 }
320 template <typename T>
321 static inline bool is_empty (const T &entry)
322 {
323 return entry.m_key == NULL;
324 }
325 static const bool empty_zero_p = false;
326 };
327
328 /* A state machine for detecting misuses of the malloc/free API.
329
330    See sm-malloc.dot for an overview (keep this in sync with that file).  */
331
332 class malloc_state_machine : public state_machine
333 {
334 public:
335 typedef allocation_state custom_data_t;
336
337 malloc_state_machine (logger *logger);
338 ~malloc_state_machine ();
339
340 state_t
341 add_state (const char *name, enum resource_state rs,
342 const deallocator_set *deallocators,
343 const deallocator *deallocator);
344
345 bool inherited_state_p () const FINAL OVERRIDE { return false; }
346
347 state_machine::state_t
348 get_default_state (const svalue *sval) const FINAL OVERRIDE
349 {
350 if (tree cst = sval->maybe_get_constant ())
351 {
352 if (zerop (cst))
353 return m_null;
354 }
355 if (const region_svalue *ptr = sval->dyn_cast_region_svalue ())
356 {
357 const region *reg = ptr->get_pointee ();
358 const region *base_reg = reg->get_base_region ();
359 if (base_reg->get_kind () == RK_DECL
360 || base_reg->get_kind () == RK_STRING)
361 return m_non_heap;
362 }
363 return m_start;
364 }
365
366 bool on_stmt (sm_context *sm_ctxt,
367 const supernode *node,
368 const gimple *stmt) const FINAL OVERRIDE;
369
370 void on_phi (sm_context *sm_ctxt,
371 const supernode *node,
372 const gphi *phi,
373 tree rhs) const FINAL OVERRIDE;
374
375 void on_condition (sm_context *sm_ctxt,
376 const supernode *node,
377 const gimple *stmt,
378 const svalue *lhs,
379 enum tree_code op,
380 const svalue *rhs) const FINAL OVERRIDE;
381
382 bool can_purge_p (state_t s) const FINAL OVERRIDE;
383 pending_diagnostic *on_leak (tree var) const FINAL OVERRIDE;
384
385 bool reset_when_passed_to_unknown_fn_p (state_t s,
386 bool is_mutable) const FINAL OVERRIDE;
387
388 static bool unaffected_by_call_p (tree fndecl);
389
390 standard_deallocator_set m_free;
391 standard_deallocator_set m_scalar_delete;
392 standard_deallocator_set m_vector_delete;
393
394 standard_deallocator m_realloc;
395
396   /* States that are independent of the allocation API.  */
397
398 /* State for a pointer that's known to be NULL. */
399 state_t m_null;
400
401 /* State for a pointer that's known to not be on the heap (e.g. to a local
402 or global). */
403 state_t m_non_heap; // TODO: or should this be a different state machine?
404 // or do we need child values etc?
405
406 /* Stop state, for pointers we don't want to track any more. */
407 state_t m_stop;
408
409 private:
410 const custom_deallocator_set *
411 get_or_create_custom_deallocator_set (tree allocator_fndecl);
412 custom_deallocator_set *
413 maybe_create_custom_deallocator_set (tree allocator_fndecl);
414 const deallocator *
415 get_or_create_deallocator (tree deallocator_fndecl);
416
417 void on_allocator_call (sm_context *sm_ctxt,
418 const gcall *call,
419 const deallocator_set *deallocators,
420 bool returns_nonnull = false) const;
421 void on_deallocator_call (sm_context *sm_ctxt,
422 const supernode *node,
423 const gcall *call,
424 const deallocator *d,
425 unsigned argno) const;
426 void on_realloc_call (sm_context *sm_ctxt,
427 const supernode *node,
428 const gcall *call) const;
429 void on_zero_assignment (sm_context *sm_ctxt,
430 const gimple *stmt,
431 tree lhs) const;
432
433 /* A map for consolidating deallocators so that they are
434 unique per deallocator FUNCTION_DECL. */
435 typedef hash_map<tree, deallocator *> deallocator_map_t;
436 deallocator_map_t m_deallocator_map;
437
438 /* Memoized lookups from FUNCTION_DECL to custom_deallocator_set *. */
439 typedef hash_map<tree, custom_deallocator_set *> deallocator_set_cache_t;
440 deallocator_set_cache_t m_custom_deallocator_set_cache;
441
442 /* A map for consolidating custom_deallocator_set instances. */
443 typedef hash_map<custom_deallocator_set::key_t,
444 custom_deallocator_set *,
445 deallocator_set_map_traits> custom_deallocator_set_map_t;
446 custom_deallocator_set_map_t m_custom_deallocator_set_map;
447
448 /* Record of dynamically-allocated objects, for cleanup. */
449 auto_vec <custom_deallocator_set *> m_dynamic_sets;
450 auto_vec <custom_deallocator *> m_dynamic_deallocators;
451 };
452
453 /* struct deallocator. */
454
455 deallocator::deallocator (malloc_state_machine *sm,
456 const char *name,
457 enum wording wording)
458 : m_name (name),
459 m_wording (wording),
460 m_freed (sm->add_state ("freed", RS_FREED, NULL, this))
461 {
462 }
463
464 hashval_t
465 deallocator::hash () const
466 {
467 return (hashval_t)m_freed->get_id ();
468 }
469
470 void
471 deallocator::dump_to_pp (pretty_printer *pp) const
472 {
473 pp_printf (pp, "%qs", m_name);
474 }
475
476 int
477 deallocator::cmp (const deallocator *a, const deallocator *b)
478 {
479 return (int)a->m_freed->get_id () - (int)b->m_freed->get_id ();
480 }
481
482 int
483 deallocator::cmp_ptr_ptr (const void *a, const void *b)
484 {
485 return cmp (*(const deallocator * const *)a,
486 *(const deallocator * const *)b);
487 }
488
489
490 /* struct standard_deallocator : public deallocator. */
491
492 standard_deallocator::standard_deallocator (malloc_state_machine *sm,
493 const char *name,
494 enum wording wording)
495 : deallocator (sm, name, wording)
496 {
497 }
498
499 /* struct deallocator_set. */
500
501 deallocator_set::deallocator_set (malloc_state_machine *sm,
502 enum wording wording)
503 : m_wording (wording),
504 m_unchecked (sm->add_state ("unchecked", RS_UNCHECKED, this, NULL)),
505 m_nonnull (sm->add_state ("nonnull", RS_NONNULL, this, NULL))
506 {
507 }
508
509 /* Dump a description of this deallocator_set to stderr. */
510
511 DEBUG_FUNCTION void
512 deallocator_set::dump () const
513 {
514 pretty_printer pp;
515 pp_show_color (&pp) = pp_show_color (global_dc->printer);
516 pp.buffer->stream = stderr;
517 dump_to_pp (&pp);
518 pp_newline (&pp);
519 pp_flush (&pp);
520 }
521
522 /* struct custom_deallocator_set : public deallocator_set. */
523
524 custom_deallocator_set::
525 custom_deallocator_set (malloc_state_machine *sm,
526 const auto_vec <const deallocator *> *vec,
527 enum wording wording)
528 : deallocator_set (sm, wording),
529 m_deallocator_vec (vec->length ())
530 {
531 unsigned i;
532 const deallocator *d;
533 FOR_EACH_VEC_ELT (*vec, i, d)
534 m_deallocator_vec.safe_push (d);
535 }
536
537 bool
538 custom_deallocator_set::contains_p (const deallocator *d) const
539 {
540 unsigned i;
541 const deallocator *cd;
542 FOR_EACH_VEC_ELT (m_deallocator_vec, i, cd)
543 if (cd == d)
544 return true;
545 return false;
546 }
547
548 const deallocator *
549 custom_deallocator_set::maybe_get_single () const
550 {
551 if (m_deallocator_vec.length () == 1)
552 return m_deallocator_vec[0];
553 return NULL;
554 }
555
556 void
557 custom_deallocator_set::dump_to_pp (pretty_printer *pp) const
558 {
559 pp_character (pp, '{');
560 unsigned i;
561 const deallocator *d;
562 FOR_EACH_VEC_ELT (m_deallocator_vec, i, d)
563 {
564 if (i > 0)
565 pp_string (pp, ", ");
566 d->dump_to_pp (pp);
567 }
568 pp_character (pp, '}');
569 }
570
571 /* struct standard_deallocator_set : public deallocator_set. */
572
573 standard_deallocator_set::standard_deallocator_set (malloc_state_machine *sm,
574 const char *name,
575 enum wording wording)
576 : deallocator_set (sm, wording),
577 m_deallocator (sm, name, wording)
578 {
579 }
580
581 bool
582 standard_deallocator_set::contains_p (const deallocator *d) const
583 {
584 return d == &m_deallocator;
585 }
586
587 const deallocator *
588 standard_deallocator_set::maybe_get_single () const
589 {
590 return &m_deallocator;
591 }
592
593 void
594 standard_deallocator_set::dump_to_pp (pretty_printer *pp) const
595 {
596 pp_character (pp, '{');
597 pp_string (pp, m_deallocator.m_name);
598 pp_character (pp, '}');
599 }
600
601 /* Return STATE cast to the custom state subclass, or NULL for the start state.
602 Everything should be an allocation_state apart from the start state. */
603
604 static const allocation_state *
605 dyn_cast_allocation_state (state_machine::state_t state)
606 {
607 if (state->get_id () == 0)
608 return NULL;
609 return static_cast <const allocation_state *> (state);
610 }
611
612 /* Return STATE cast to the custom state subclass, for a state that is
613    already known not to be the start state.  */
614
615 static const allocation_state *
616 as_a_allocation_state (state_machine::state_t state)
617 {
618 gcc_assert (state->get_id () != 0);
619 return static_cast <const allocation_state *> (state);
620 }
621
622 /* Get the resource_state for STATE. */
623
624 static enum resource_state
625 get_rs (state_machine::state_t state)
626 {
627 if (const allocation_state *astate = dyn_cast_allocation_state (state))
628 return astate->m_rs;
629 else
630 return RS_START;
631 }
632
633 /* Return true if STATE is the start state. */
634
635 static bool
636 start_p (state_machine::state_t state)
637 {
638 return get_rs (state) == RS_START;
639 }
640
641 /* Return true if STATE is an unchecked result from an allocator. */
642
643 static bool
644 unchecked_p (state_machine::state_t state)
645 {
646 return get_rs (state) == RS_UNCHECKED;
647 }
648
649 /* Return true if STATE is a non-null result from an allocator. */
650
651 static bool
652 nonnull_p (state_machine::state_t state)
653 {
654 return get_rs (state) == RS_NONNULL;
655 }
656
657 /* Return true if STATE is a value that has been passed to a deallocator. */
658
659 static bool
660 freed_p (state_machine::state_t state)
661 {
662 return get_rs (state) == RS_FREED;
663 }
664
665 /* Class for diagnostics relating to malloc_state_machine. */
666
667 class malloc_diagnostic : public pending_diagnostic
668 {
669 public:
670 malloc_diagnostic (const malloc_state_machine &sm, tree arg)
671 : m_sm (sm), m_arg (arg)
672 {}
673
674 bool subclass_equal_p (const pending_diagnostic &base_other) const OVERRIDE
675 {
676 return same_tree_p (m_arg, ((const malloc_diagnostic &)base_other).m_arg);
677 }
678
679 label_text describe_state_change (const evdesc::state_change &change)
680 OVERRIDE
681 {
682 if (change.m_old_state == m_sm.get_start_state ()
683 && unchecked_p (change.m_new_state))
684 // TODO: verify that it's the allocation stmt, not a copy
685 return label_text::borrow ("allocated here");
686 if (unchecked_p (change.m_old_state)
687 && nonnull_p (change.m_new_state))
688 {
689 if (change.m_expr)
690 return change.formatted_print ("assuming %qE is non-NULL",
691 change.m_expr);
692 else
693 return change.formatted_print ("assuming %qs is non-NULL",
694 "<unknown>");
695 }
696 if (change.m_new_state == m_sm.m_null)
697 {
698 if (unchecked_p (change.m_old_state))
699 {
700 if (change.m_expr)
701 return change.formatted_print ("assuming %qE is NULL",
702 change.m_expr);
703 else
704 return change.formatted_print ("assuming %qs is NULL",
705 "<unknown>");
706 }
707 else
708 {
709 if (change.m_expr)
710 return change.formatted_print ("%qE is NULL",
711 change.m_expr);
712 else
713 return change.formatted_print ("%qs is NULL",
714 "<unknown>");
715 }
716 }
717
718 return label_text ();
719 }
720
721 protected:
722 const malloc_state_machine &m_sm;
723 tree m_arg;
724 };
725
726 /* Concrete subclass for reporting mismatching allocator/deallocator
727 diagnostics. */
728
729 class mismatching_deallocation : public malloc_diagnostic
730 {
731 public:
732 mismatching_deallocation (const malloc_state_machine &sm, tree arg,
733 const deallocator_set *expected_deallocators,
734 const deallocator *actual_dealloc)
735 : malloc_diagnostic (sm, arg),
736 m_expected_deallocators (expected_deallocators),
737 m_actual_dealloc (actual_dealloc)
738 {}
739
740 const char *get_kind () const FINAL OVERRIDE
741 {
742 return "mismatching_deallocation";
743 }
744
745 bool emit (rich_location *rich_loc) FINAL OVERRIDE
746 {
747 auto_diagnostic_group d;
748 diagnostic_metadata m;
749 m.add_cwe (762); /* CWE-762: Mismatched Memory Management Routines. */
750 if (const deallocator *expected_dealloc
751 = m_expected_deallocators->maybe_get_single ())
752 return warning_meta (rich_loc, m, OPT_Wanalyzer_mismatching_deallocation,
753 "%qE should have been deallocated with %qs"
754 " but was deallocated with %qs",
755 m_arg, expected_dealloc->m_name,
756 m_actual_dealloc->m_name);
757 else
758 return warning_meta (rich_loc, m, OPT_Wanalyzer_mismatching_deallocation,
759 "%qs called on %qE returned from a mismatched"
760 " allocation function",
761 m_actual_dealloc->m_name, m_arg);
762 }
763
764 label_text describe_state_change (const evdesc::state_change &change)
765 FINAL OVERRIDE
766 {
767 if (unchecked_p (change.m_new_state))
768 {
769 m_alloc_event = change.m_event_id;
770 if (const deallocator *expected_dealloc
771 = m_expected_deallocators->maybe_get_single ())
772 return change.formatted_print ("allocated here"
773 " (expects deallocation with %qs)",
774 expected_dealloc->m_name);
775 else
776 return change.formatted_print ("allocated here");
777 }
778 return malloc_diagnostic::describe_state_change (change);
779 }
780
781 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
782 {
783 if (m_alloc_event.known_p ())
784 {
785 if (const deallocator *expected_dealloc
786 = m_expected_deallocators->maybe_get_single ())
787 return ev.formatted_print
788 ("deallocated with %qs here;"
789 " allocation at %@ expects deallocation with %qs",
790 m_actual_dealloc->m_name, &m_alloc_event,
791 expected_dealloc->m_name);
792 else
793 return ev.formatted_print
794 ("deallocated with %qs here;"
795 " allocated at %@",
796 m_actual_dealloc->m_name, &m_alloc_event);
797 }
798 return ev.formatted_print ("deallocated with %qs here",
799 m_actual_dealloc->m_name);
800 }
801
802 private:
803 diagnostic_event_id_t m_alloc_event;
804 const deallocator_set *m_expected_deallocators;
805 const deallocator *m_actual_dealloc;
806 };
807
808 /* Concrete subclass for reporting double-free diagnostics. */
809
810 class double_free : public malloc_diagnostic
811 {
812 public:
813 double_free (const malloc_state_machine &sm, tree arg, const char *funcname)
814 : malloc_diagnostic (sm, arg), m_funcname (funcname)
815 {}
816
817 const char *get_kind () const FINAL OVERRIDE { return "double_free"; }
818
819 bool emit (rich_location *rich_loc) FINAL OVERRIDE
820 {
821 auto_diagnostic_group d;
822 diagnostic_metadata m;
823 m.add_cwe (415); /* CWE-415: Double Free. */
824 return warning_meta (rich_loc, m, OPT_Wanalyzer_double_free,
825 "double-%qs of %qE", m_funcname, m_arg);
826 }
827
828 label_text describe_state_change (const evdesc::state_change &change)
829 FINAL OVERRIDE
830 {
831 if (freed_p (change.m_new_state))
832 {
833 m_first_free_event = change.m_event_id;
834 return change.formatted_print ("first %qs here", m_funcname);
835 }
836 return malloc_diagnostic::describe_state_change (change);
837 }
838
839 label_text describe_call_with_state (const evdesc::call_with_state &info)
840 FINAL OVERRIDE
841 {
842 if (freed_p (info.m_state))
843 return info.formatted_print
844 ("passing freed pointer %qE in call to %qE from %qE",
845 info.m_expr, info.m_callee_fndecl, info.m_caller_fndecl);
846 return label_text ();
847 }
848
849 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
850 {
851 if (m_first_free_event.known_p ())
852 return ev.formatted_print ("second %qs here; first %qs was at %@",
853 m_funcname, m_funcname,
854 &m_first_free_event);
855 return ev.formatted_print ("second %qs here", m_funcname);
856 }
857
858 private:
859 diagnostic_event_id_t m_first_free_event;
860 const char *m_funcname;
861 };
862
863 /* Abstract subclass for describing possible bad uses of NULL.
864 Responsible for describing the call that could return NULL. */
865
866 class possible_null : public malloc_diagnostic
867 {
868 public:
869 possible_null (const malloc_state_machine &sm, tree arg)
870 : malloc_diagnostic (sm, arg)
871 {}
872
873 label_text describe_state_change (const evdesc::state_change &change)
874 FINAL OVERRIDE
875 {
876 if (change.m_old_state == m_sm.get_start_state ()
877 && unchecked_p (change.m_new_state))
878 {
879 m_origin_of_unchecked_event = change.m_event_id;
880 return label_text::borrow ("this call could return NULL");
881 }
882 return malloc_diagnostic::describe_state_change (change);
883 }
884
885 label_text describe_return_of_state (const evdesc::return_of_state &info)
886 FINAL OVERRIDE
887 {
888 if (unchecked_p (info.m_state))
889 return info.formatted_print ("possible return of NULL to %qE from %qE",
890 info.m_caller_fndecl, info.m_callee_fndecl);
891 return label_text ();
892 }
893
894 protected:
895 diagnostic_event_id_t m_origin_of_unchecked_event;
896 };
897
898 /* Concrete subclass for describing dereference of a possible NULL
899 value. */
900
901 class possible_null_deref : public possible_null
902 {
903 public:
904 possible_null_deref (const malloc_state_machine &sm, tree arg)
905 : possible_null (sm, arg)
906 {}
907
908 const char *get_kind () const FINAL OVERRIDE { return "possible_null_deref"; }
909
910 bool emit (rich_location *rich_loc) FINAL OVERRIDE
911 {
912 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
913 diagnostic_metadata m;
914 m.add_cwe (690);
915 return warning_meta (rich_loc, m,
916 OPT_Wanalyzer_possible_null_dereference,
917 "dereference of possibly-NULL %qE", m_arg);
918 }
919
920 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
921 {
922 if (m_origin_of_unchecked_event.known_p ())
923 return ev.formatted_print ("%qE could be NULL: unchecked value from %@",
924 ev.m_expr,
925 &m_origin_of_unchecked_event);
926 else
927 return ev.formatted_print ("%qE could be NULL", ev.m_expr);
928 }
929
930 };
931
932 /* Return true if FNDECL is a C++ method. */
933
934 static bool
935 method_p (tree fndecl)
936 {
937 return TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE;
938 }
939
940 /* Return a 1-based description of ARG_IDX (0-based) of FNDECL.
941 Compare with %P in the C++ FE (implemented in cp/error.c: parm_to_string
942 as called from cp_printer). */
943
944 static label_text
945 describe_argument_index (tree fndecl, int arg_idx)
946 {
947 if (method_p (fndecl))
948 if (arg_idx == 0)
949 return label_text::borrow ("'this'");
950 pretty_printer pp;
951 pp_printf (&pp, "%u", arg_idx + 1 - method_p (fndecl));
952 return label_text::take (xstrdup (pp_formatted_text (&pp)));
953 }
954
955 /* Subroutine for use by possible_null_arg::emit and null_arg::emit.
956 Issue a note informing that the pertinent argument must be non-NULL. */
957
958 static void
959 inform_nonnull_attribute (tree fndecl, int arg_idx)
960 {
961 label_text arg_desc = describe_argument_index (fndecl, arg_idx);
962 inform (DECL_SOURCE_LOCATION (fndecl),
963 "argument %s of %qD must be non-null",
964 arg_desc.m_buffer, fndecl);
965 arg_desc.maybe_free ();
966 /* Ideally we would use the location of the parm and underline the
967 attribute also - but we don't have the location_t values at this point
968 in the middle-end.
969 For reference, the C and C++ FEs have get_fndecl_argument_location. */
970 }
971
972 /* Concrete subclass for describing passing a possibly-NULL value to a
973 function marked with __attribute__((nonnull)). */
974
975 class possible_null_arg : public possible_null
976 {
977 public:
978 possible_null_arg (const malloc_state_machine &sm, tree arg,
979 tree fndecl, int arg_idx)
980 : possible_null (sm, arg),
981 m_fndecl (fndecl), m_arg_idx (arg_idx)
982 {}
983
984 const char *get_kind () const FINAL OVERRIDE { return "possible_null_arg"; }
985
986 bool subclass_equal_p (const pending_diagnostic &base_other) const
987 {
988 const possible_null_arg &sub_other
989 = (const possible_null_arg &)base_other;
990 return (same_tree_p (m_arg, sub_other.m_arg)
991 && m_fndecl == sub_other.m_fndecl
992 && m_arg_idx == sub_other.m_arg_idx);
993 }
994
995
996 bool emit (rich_location *rich_loc) FINAL OVERRIDE
997 {
998 /* CWE-690: Unchecked Return Value to NULL Pointer Dereference. */
999 auto_diagnostic_group d;
1000 diagnostic_metadata m;
1001 m.add_cwe (690);
1002 bool warned
1003 = warning_meta (rich_loc, m, OPT_Wanalyzer_possible_null_argument,
1004 "use of possibly-NULL %qE where non-null expected",
1005 m_arg);
1006 if (warned)
1007 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1008 return warned;
1009 }
1010
1011 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1012 {
1013 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1014 label_text result;
1015 if (m_origin_of_unchecked_event.known_p ())
1016 result = ev.formatted_print ("argument %s (%qE) from %@ could be NULL"
1017 " where non-null expected",
1018 arg_desc.m_buffer, ev.m_expr,
1019 &m_origin_of_unchecked_event);
1020 else
1021 result = ev.formatted_print ("argument %s (%qE) could be NULL"
1022 " where non-null expected",
1023 arg_desc.m_buffer, ev.m_expr);
1024 arg_desc.maybe_free ();
1025 return result;
1026 }
1027
1028 private:
1029 tree m_fndecl;
1030 int m_arg_idx;
1031 };
1032
1033 /* Concrete subclass for describing a dereference of a NULL value. */
1034
1035 class null_deref : public malloc_diagnostic
1036 {
1037 public:
1038 null_deref (const malloc_state_machine &sm, tree arg)
1039 : malloc_diagnostic (sm, arg) {}
1040
1041 const char *get_kind () const FINAL OVERRIDE { return "null_deref"; }
1042
1043 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1044 {
1045 /* CWE-476: NULL Pointer Dereference. */
1046 diagnostic_metadata m;
1047 m.add_cwe (476);
1048 return warning_meta (rich_loc, m,
1049 OPT_Wanalyzer_null_dereference,
1050 "dereference of NULL %qE", m_arg);
1051 }
1052
1053 label_text describe_return_of_state (const evdesc::return_of_state &info)
1054 FINAL OVERRIDE
1055 {
1056 if (info.m_state == m_sm.m_null)
1057 return info.formatted_print ("return of NULL to %qE from %qE",
1058 info.m_caller_fndecl, info.m_callee_fndecl);
1059 return label_text ();
1060 }
1061
1062 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1063 {
1064 return ev.formatted_print ("dereference of NULL %qE", ev.m_expr);
1065 }
1066 };
1067
1068 /* Concrete subclass for describing passing a NULL value to a
1069 function marked with __attribute__((nonnull)). */
1070
1071 class null_arg : public malloc_diagnostic
1072 {
1073 public:
1074 null_arg (const malloc_state_machine &sm, tree arg,
1075 tree fndecl, int arg_idx)
1076 : malloc_diagnostic (sm, arg),
1077 m_fndecl (fndecl), m_arg_idx (arg_idx)
1078 {}
1079
1080 const char *get_kind () const FINAL OVERRIDE { return "null_arg"; }
1081
1082 bool subclass_equal_p (const pending_diagnostic &base_other) const
1083 {
1084 const null_arg &sub_other
1085 = (const null_arg &)base_other;
1086 return (same_tree_p (m_arg, sub_other.m_arg)
1087 && m_fndecl == sub_other.m_fndecl
1088 && m_arg_idx == sub_other.m_arg_idx);
1089 }
1090
1091 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1092 {
1093 /* CWE-476: NULL Pointer Dereference. */
1094 auto_diagnostic_group d;
1095 diagnostic_metadata m;
1096 m.add_cwe (476);
1097
1098 bool warned;
1099 if (zerop (m_arg))
1100 warned = warning_meta (rich_loc, m, OPT_Wanalyzer_null_argument,
1101 "use of NULL where non-null expected");
1102 else
1103 warned = warning_meta (rich_loc, m, OPT_Wanalyzer_null_argument,
1104 "use of NULL %qE where non-null expected",
1105 m_arg);
1106 if (warned)
1107 inform_nonnull_attribute (m_fndecl, m_arg_idx);
1108 return warned;
1109 }
1110
1111 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1112 {
1113 label_text arg_desc = describe_argument_index (m_fndecl, m_arg_idx);
1114 label_text result;
1115 if (zerop (ev.m_expr))
1116 result = ev.formatted_print ("argument %s NULL where non-null expected",
1117 arg_desc.m_buffer);
1118 else
1119 result = ev.formatted_print ("argument %s (%qE) NULL"
1120 " where non-null expected",
1121 arg_desc.m_buffer, ev.m_expr);
1122 arg_desc.maybe_free ();
1123 return result;
1124 }
1125
1126 private:
1127 tree m_fndecl;
1128 int m_arg_idx;
1129 };
1130
1131 class use_after_free : public malloc_diagnostic
1132 {
1133 public:
1134 use_after_free (const malloc_state_machine &sm, tree arg,
1135 const deallocator *deallocator)
1136 : malloc_diagnostic (sm, arg),
1137 m_deallocator (deallocator)
1138 {
1139 gcc_assert (deallocator);
1140 }
1141
1142 const char *get_kind () const FINAL OVERRIDE { return "use_after_free"; }
1143
1144 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1145 {
1146 /* CWE-416: Use After Free. */
1147 diagnostic_metadata m;
1148 m.add_cwe (416);
1149 return warning_meta (rich_loc, m, OPT_Wanalyzer_use_after_free,
1150 "use after %<%s%> of %qE",
1151 m_deallocator->m_name, m_arg);
1152 }
1153
1154 label_text describe_state_change (const evdesc::state_change &change)
1155 FINAL OVERRIDE
1156 {
1157 if (freed_p (change.m_new_state))
1158 {
1159 m_free_event = change.m_event_id;
1160 switch (m_deallocator->m_wording)
1161 {
1162 default:
1163 case WORDING_REALLOCATED:
1164 gcc_unreachable ();
1165 case WORDING_FREED:
1166 return label_text::borrow ("freed here");
1167 case WORDING_DELETED:
1168 return label_text::borrow ("deleted here");
1169 case WORDING_DEALLOCATED:
1170 return label_text::borrow ("deallocated here");
1171 }
1172 }
1173 return malloc_diagnostic::describe_state_change (change);
1174 }
1175
1176 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1177 {
1178 const char *funcname = m_deallocator->m_name;
1179 if (m_free_event.known_p ())
1180 switch (m_deallocator->m_wording)
1181 {
1182 default:
1183 case WORDING_REALLOCATED:
1184 gcc_unreachable ();
1185 case WORDING_FREED:
1186 return ev.formatted_print ("use after %<%s%> of %qE; freed at %@",
1187 funcname, ev.m_expr, &m_free_event);
1188 case WORDING_DELETED:
1189 return ev.formatted_print ("use after %<%s%> of %qE; deleted at %@",
1190 funcname, ev.m_expr, &m_free_event);
1191 case WORDING_DEALLOCATED:
1192 return ev.formatted_print ("use after %<%s%> of %qE;"
1193 " deallocated at %@",
1194 funcname, ev.m_expr, &m_free_event);
1195 }
1196 else
1197 return ev.formatted_print ("use after %<%s%> of %qE",
1198 funcname, ev.m_expr);
1199 }
1200
1201 private:
1202 diagnostic_event_id_t m_free_event;
1203 const deallocator *m_deallocator;
1204 };
1205
1206 class malloc_leak : public malloc_diagnostic
1207 {
1208 public:
1209 malloc_leak (const malloc_state_machine &sm, tree arg)
1210 : malloc_diagnostic (sm, arg) {}
1211
1212 const char *get_kind () const FINAL OVERRIDE { return "malloc_leak"; }
1213
1214 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1215 {
1216 diagnostic_metadata m;
1217 m.add_cwe (401);
1218 if (m_arg)
1219 return warning_meta (rich_loc, m, OPT_Wanalyzer_malloc_leak,
1220 "leak of %qE", m_arg);
1221 else
1222 return warning_meta (rich_loc, m, OPT_Wanalyzer_malloc_leak,
1223 "leak of %qs", "<unknown>");
1224 }
1225
1226 label_text describe_state_change (const evdesc::state_change &change)
1227 FINAL OVERRIDE
1228 {
1229 if (unchecked_p (change.m_new_state)
1230 || (start_p (change.m_old_state) && nonnull_p (change.m_new_state)))
1231 {
1232 m_alloc_event = change.m_event_id;
1233 return label_text::borrow ("allocated here");
1234 }
1235 return malloc_diagnostic::describe_state_change (change);
1236 }
1237
1238 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1239 {
1240 if (ev.m_expr)
1241 {
1242 if (m_alloc_event.known_p ())
1243 return ev.formatted_print ("%qE leaks here; was allocated at %@",
1244 ev.m_expr, &m_alloc_event);
1245 else
1246 return ev.formatted_print ("%qE leaks here", ev.m_expr);
1247 }
1248 else
1249 {
1250 if (m_alloc_event.known_p ())
1251 return ev.formatted_print ("%qs leaks here; was allocated at %@",
1252 "<unknown>", &m_alloc_event);
1253 else
1254 return ev.formatted_print ("%qs leaks here", "<unknown>");
1255 }
1256 }
1257
1258 private:
1259 diagnostic_event_id_t m_alloc_event;
1260 };
1261
1262 class free_of_non_heap : public malloc_diagnostic
1263 {
1264 public:
1265 free_of_non_heap (const malloc_state_machine &sm, tree arg,
1266 const char *funcname)
1267 : malloc_diagnostic (sm, arg), m_funcname (funcname), m_kind (KIND_UNKNOWN)
1268 {
1269 }
1270
1271 const char *get_kind () const FINAL OVERRIDE { return "free_of_non_heap"; }
1272
1273 bool subclass_equal_p (const pending_diagnostic &base_other) const
1274 FINAL OVERRIDE
1275 {
1276 const free_of_non_heap &other = (const free_of_non_heap &)base_other;
1277 return (same_tree_p (m_arg, other.m_arg) && m_kind == other.m_kind);
1278 }
1279
1280 bool emit (rich_location *rich_loc) FINAL OVERRIDE
1281 {
1282 auto_diagnostic_group d;
1283 diagnostic_metadata m;
1284 m.add_cwe (590); /* CWE-590: Free of Memory not on the Heap. */
1285 switch (m_kind)
1286 {
1287 default:
1288 gcc_unreachable ();
1289 case KIND_UNKNOWN:
1290 return warning_meta (rich_loc, m, OPT_Wanalyzer_free_of_non_heap,
1291 "%<%s%> of %qE which points to memory"
1292 " not on the heap",
1293 m_funcname, m_arg);
1294 break;
1295 case KIND_ALLOCA:
1296 return warning_meta (rich_loc, m, OPT_Wanalyzer_free_of_non_heap,
1297 "%<%s%> of memory allocated on the stack by"
1298 " %qs (%qE) will corrupt the heap",
1299 m_funcname, "alloca", m_arg);
1300 break;
1301 }
1302 }
1303
1304 label_text describe_state_change (const evdesc::state_change &change)
1305 FINAL OVERRIDE
1306 {
1307 /* Attempt to reconstruct what kind of pointer it is.
1308 (It seems neater for this to be a part of the state, though). */
1309 if (change.m_expr && TREE_CODE (change.m_expr) == SSA_NAME)
1310 {
1311 gimple *def_stmt = SSA_NAME_DEF_STMT (change.m_expr);
1312 if (gcall *call = dyn_cast <gcall *> (def_stmt))
1313 {
1314 if (is_special_named_call_p (call, "alloca", 1)
1315 || is_special_named_call_p (call, "__builtin_alloca", 1))
1316 {
1317 m_kind = KIND_ALLOCA;
1318 return label_text::borrow
1319 ("memory is allocated on the stack here");
1320 }
1321 }
1322 }
1323 return label_text::borrow ("pointer is from here");
1324 }
1325
1326 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
1327 {
1328 return ev.formatted_print ("call to %qs here", m_funcname);
1329 }
1330
1331 private:
1332 enum kind
1333 {
1334 KIND_UNKNOWN,
1335 KIND_ALLOCA
1336 };
1337 const char *m_funcname;
1338 enum kind m_kind;
1339 };
1340
1341 /* struct allocation_state : public state_machine::state. */
1342
1343 /* Implementation of state_machine::state::dump_to_pp vfunc
1344 for allocation_state: append the API that this allocation is
1345 associated with. */
1346
1347 void
1348 allocation_state::dump_to_pp (pretty_printer *pp) const
1349 {
1350 state_machine::state::dump_to_pp (pp);
1351 if (m_deallocators)
1352 {
1353 pp_string (pp, " (");
1354 m_deallocators->dump_to_pp (pp);
1355 pp_character (pp, ')');
1356 }
1357 }
1358
1359 /* Given an allocation_state for a deallocator_set, get the "nonnull" state
1360 for the corresponding allocator(s). */
1361
1362 const allocation_state *
1363 allocation_state::get_nonnull () const
1364 {
1365 gcc_assert (m_deallocators);
1366 return as_a_allocation_state (m_deallocators->m_nonnull);
1367 }
1368
1369 /* malloc_state_machine's ctor. */
1370
1371 malloc_state_machine::malloc_state_machine (logger *logger)
1372 : state_machine ("malloc", logger),
1373 m_free (this, "free", WORDING_FREED),
1374 m_scalar_delete (this, "delete", WORDING_DELETED),
1375 m_vector_delete (this, "delete[]", WORDING_DELETED),
1376 m_realloc (this, "realloc", WORDING_REALLOCATED)
1377 {
1378 gcc_assert (m_start->get_id () == 0);
1379 m_null = add_state ("null", RS_FREED, NULL, NULL);
1380 m_non_heap = add_state ("non-heap", RS_NON_HEAP, NULL, NULL);
1381 m_stop = add_state ("stop", RS_STOP, NULL, NULL);
1382 }
1383
1384 malloc_state_machine::~malloc_state_machine ()
1385 {
1386 unsigned i;
1387 custom_deallocator_set *set;
1388 FOR_EACH_VEC_ELT (m_dynamic_sets, i, set)
1389 delete set;
1390 custom_deallocator *d;
1391 FOR_EACH_VEC_ELT (m_dynamic_deallocators, i, d)
1392 delete d;
1393 }
1394
1395 state_machine::state_t
1396 malloc_state_machine::add_state (const char *name, enum resource_state rs,
1397 const deallocator_set *deallocators,
1398 const deallocator *deallocator)
1399 {
1400 return add_custom_state (new allocation_state (name, alloc_state_id (),
1401 rs, deallocators,
1402 deallocator));
1403 }
1404
1405 /* If ALLOCATOR_FNDECL has any "__attribute__((malloc(FOO)))",
1406 return a custom_deallocator_set for them, consolidating them
1407 to ensure uniqueness of the sets.
1408
1409 Return NULL if it has no such attributes. */
1410
1411 const custom_deallocator_set *
1412 malloc_state_machine::
1413 get_or_create_custom_deallocator_set (tree allocator_fndecl)
1414 {
1415 /* Early rejection of decls without attributes. */
1416 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1417 if (!attrs)
1418 return NULL;
1419
1420 /* Otherwise, call maybe_create_custom_deallocator_set,
1421 memoizing the result. */
1422 if (custom_deallocator_set **slot
1423 = m_custom_deallocator_set_cache.get (allocator_fndecl))
1424 return *slot;
1425 custom_deallocator_set *set
1426 = maybe_create_custom_deallocator_set (allocator_fndecl);
1427 m_custom_deallocator_set_cache.put (allocator_fndecl, set);
1428 return set;
1429 }
1430
1431 /* Given ALLOCATOR_FNDECL, a FUNCTION_DECL with attributes,
1432 look for any "__attribute__((malloc(FOO)))" and return a
1433 custom_deallocator_set for them, consolidating them
1434 to ensure uniqueness of the sets.
1435
1436 Return NULL if it has no such attributes.
1437
1438 Subroutine of get_or_create_custom_deallocator_set which
1439 memoizes the result. */
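/* For instance (an illustrative sketch, echoing the example at the top of
   this file):

     __attribute__ ((malloc (fclose)))
     __attribute__ ((malloc (freopen, 3)))
     FILE* fopen (const char*, const char*);

   yields a single custom_deallocator_set {fclose, freopen}; another
   allocator declared with the same attributes reuses that set via
   m_custom_deallocator_set_map.  */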
1440
1441 custom_deallocator_set *
1442 malloc_state_machine::
1443 maybe_create_custom_deallocator_set (tree allocator_fndecl)
1444 {
1445 tree attrs = DECL_ATTRIBUTES (allocator_fndecl);
1446 gcc_assert (attrs);
1447
1448 /* Look for instances of __attribute__((malloc(FOO))). */
1449 auto_vec<const deallocator *> deallocator_vec;
1450 for (tree allocs = attrs;
1451 (allocs = lookup_attribute ("malloc", allocs));
1452 allocs = TREE_CHAIN (allocs))
1453 {
1454 tree args = TREE_VALUE (allocs);
1455 if (!args)
1456 continue;
1457 if (TREE_VALUE (args))
1458 {
1459 const deallocator *d
1460 = get_or_create_deallocator (TREE_VALUE (args));
1461 deallocator_vec.safe_push (d);
1462 }
1463 }
1464
1465 /* If there weren't any deallocators, bail. */
1466 if (deallocator_vec.length () == 0)
1467 return NULL;
1468
1469 /* Consolidate, so that we reuse existing deallocator_set
1470 instances. */
1471 deallocator_vec.qsort (deallocator::cmp_ptr_ptr);
1472 custom_deallocator_set **slot
1473 = m_custom_deallocator_set_map.get (&deallocator_vec);
1474 if (slot)
1475 return *slot;
1476 custom_deallocator_set *set
1477 = new custom_deallocator_set (this, &deallocator_vec, WORDING_DEALLOCATED);
1478 m_custom_deallocator_set_map.put (&set->m_deallocator_vec, set);
1479 m_dynamic_sets.safe_push (set);
1480 return set;
1481 }
1482
1483 /* Get the deallocator for DEALLOCATOR_FNDECL, creating it if necessary. */
1484
1485 const deallocator *
1486 malloc_state_machine::get_or_create_deallocator (tree deallocator_fndecl)
1487 {
1488 deallocator **slot = m_deallocator_map.get (deallocator_fndecl);
1489 if (slot)
1490 return *slot;
1491
1492 /* Reuse "free". */
1493 deallocator *d;
1494 if (is_named_call_p (deallocator_fndecl, "free")
1495 || is_std_named_call_p (deallocator_fndecl, "free"))
1496 d = &m_free.m_deallocator;
1497 else
1498 {
1499 custom_deallocator *cd
1500 = new custom_deallocator (this, deallocator_fndecl,
1501 WORDING_DEALLOCATED);
1502 m_dynamic_deallocators.safe_push (cd);
1503 d = cd;
1504 }
1505 m_deallocator_map.put (deallocator_fndecl, d);
1506 return d;
1507 }
1508
1509 /* Implementation of state_machine::on_stmt vfunc for malloc_state_machine. */
1510
1511 bool
1512 malloc_state_machine::on_stmt (sm_context *sm_ctxt,
1513 const supernode *node,
1514 const gimple *stmt) const
1515 {
1516 if (const gcall *call = dyn_cast <const gcall *> (stmt))
1517 if (tree callee_fndecl = sm_ctxt->get_fndecl_for_call (call))
1518 {
1519 if (is_named_call_p (callee_fndecl, "malloc", call, 1)
1520 || is_named_call_p (callee_fndecl, "calloc", call, 2)
1521 || is_std_named_call_p (callee_fndecl, "malloc", call, 1)
1522 || is_std_named_call_p (callee_fndecl, "calloc", call, 2)
1523 || is_named_call_p (callee_fndecl, "__builtin_malloc", call, 1)
1524 || is_named_call_p (callee_fndecl, "__builtin_calloc", call, 2)
1525 || is_named_call_p (callee_fndecl, "strdup", call, 1)
1526 || is_named_call_p (callee_fndecl, "strndup", call, 2))
1527 {
1528 on_allocator_call (sm_ctxt, call, &m_free);
1529 return true;
1530 }
1531
1532 if (is_named_call_p (callee_fndecl, "operator new", call, 1))
1533 on_allocator_call (sm_ctxt, call, &m_scalar_delete);
1534 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
1535 on_allocator_call (sm_ctxt, call, &m_vector_delete);
1536 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1537 || is_named_call_p (callee_fndecl, "operator delete", call, 2))
1538 {
1539 on_deallocator_call (sm_ctxt, node, call,
1540 &m_scalar_delete.m_deallocator, 0);
1541 return true;
1542 }
1543 else if (is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1544 {
1545 on_deallocator_call (sm_ctxt, node, call,
1546 &m_vector_delete.m_deallocator, 0);
1547 return true;
1548 }
1549
1550 if (is_named_call_p (callee_fndecl, "alloca", call, 1)
1551 || is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
1552 {
1553 tree lhs = gimple_call_lhs (call);
1554 if (lhs)
1555 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1556 return true;
1557 }
1558
1559 if (is_named_call_p (callee_fndecl, "free", call, 1)
1560 || is_std_named_call_p (callee_fndecl, "free", call, 1)
1561 || is_named_call_p (callee_fndecl, "__builtin_free", call, 1))
1562 {
1563 on_deallocator_call (sm_ctxt, node, call,
1564 &m_free.m_deallocator, 0);
1565 return true;
1566 }
1567
1568 if (is_named_call_p (callee_fndecl, "realloc", call, 2)
1569 || is_named_call_p (callee_fndecl, "__builtin_realloc", call, 2))
1570 {
1571 on_realloc_call (sm_ctxt, node, call);
1572 return true;
1573 }
1574
1575 if (unaffected_by_call_p (callee_fndecl))
1576 return true;
1577
1578 /* Cast away const-ness for cache-like operations. */
1579 malloc_state_machine *mutable_this
1580 = const_cast <malloc_state_machine *> (this);
1581
1582 /* Handle "__attribute__((malloc(FOO)))". */
1583 if (const deallocator_set *deallocators
1584 = mutable_this->get_or_create_custom_deallocator_set
1585 (callee_fndecl))
1586 {
1587 tree attrs = TYPE_ATTRIBUTES (TREE_TYPE (callee_fndecl));
1588 bool returns_nonnull
1589 = lookup_attribute ("returns_nonnull", attrs);
1590 on_allocator_call (sm_ctxt, call, deallocators, returns_nonnull);
1591 }
1592
1593 /* Handle "__attribute__((nonnull))". */
1594 {
1595 tree fntype = TREE_TYPE (callee_fndecl);
1596 bitmap nonnull_args = get_nonnull_args (fntype);
1597 if (nonnull_args)
1598 {
1599 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
1600 {
1601 tree arg = gimple_call_arg (stmt, i);
1602 if (TREE_CODE (TREE_TYPE (arg)) != POINTER_TYPE)
1603 continue;
1604 	      /* If the attribute has no arguments, all pointer arguments are
1605 		 implicitly nonnull; otherwise just the specified ones.  */
1606 if (bitmap_empty_p (nonnull_args)
1607 || bitmap_bit_p (nonnull_args, i))
1608 {
1609 state_t state = sm_ctxt->get_state (stmt, arg);
1610 /* Can't use a switch as the states are non-const. */
1611 if (unchecked_p (state))
1612 {
1613 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1614 sm_ctxt->warn (node, stmt, arg,
1615 new possible_null_arg (*this, diag_arg,
1616 callee_fndecl,
1617 i));
1618 const allocation_state *astate
1619 = as_a_allocation_state (state);
1620 sm_ctxt->set_next_state (stmt, arg,
1621 astate->get_nonnull ());
1622 }
1623 else if (state == m_null)
1624 {
1625 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1626 sm_ctxt->warn (node, stmt, arg,
1627 new null_arg (*this, diag_arg,
1628 callee_fndecl, i));
1629 sm_ctxt->set_next_state (stmt, arg, m_stop);
1630 }
1631 }
1632 }
1633 BITMAP_FREE (nonnull_args);
1634 }
1635 }
1636
1637 /* Check for this after nonnull, so that if we have both
1638 	   then we transition to "freed", rather than just to "nonnull".  */
1639 unsigned dealloc_argno = fndecl_dealloc_argno (callee_fndecl);
1640 if (dealloc_argno != UINT_MAX)
1641 {
1642 const deallocator *d
1643 = mutable_this->get_or_create_deallocator (callee_fndecl);
1644 on_deallocator_call (sm_ctxt, node, call, d, dealloc_argno);
1645 }
1646 }
1647
1648 if (tree lhs = sm_ctxt->is_zero_assignment (stmt))
1649 if (any_pointer_p (lhs))
1650       on_zero_assignment (sm_ctxt, stmt, lhs);
1651
1652 /* If we have "LHS = &EXPR;" and EXPR is something other than a MEM_REF,
1653 transition LHS from start to non_heap.
1654 Doing it for ADDR_EXPR(MEM_REF()) is likely wrong, and can lead to
1655 unbounded chains of unmergeable sm-state on pointer arithmetic in loops
1656 when optimization is enabled. */
1657 if (const gassign *assign_stmt = dyn_cast <const gassign *> (stmt))
1658 {
1659 enum tree_code op = gimple_assign_rhs_code (assign_stmt);
1660 if (op == ADDR_EXPR)
1661 {
1662 tree lhs = gimple_assign_lhs (assign_stmt);
1663 if (lhs)
1664 {
1665 tree addr_expr = gimple_assign_rhs1 (assign_stmt);
1666 if (TREE_CODE (TREE_OPERAND (addr_expr, 0)) != MEM_REF)
1667 sm_ctxt->on_transition (node, stmt, lhs, m_start, m_non_heap);
1668 }
1669 }
1670 }
1671
1672 /* Handle dereferences. */
1673 for (unsigned i = 0; i < gimple_num_ops (stmt); i++)
1674 {
1675 tree op = gimple_op (stmt, i);
1676 if (!op)
1677 continue;
1678 if (TREE_CODE (op) == COMPONENT_REF)
1679 op = TREE_OPERAND (op, 0);
1680
1681 if (TREE_CODE (op) == MEM_REF)
1682 {
1683 tree arg = TREE_OPERAND (op, 0);
1684
1685 state_t state = sm_ctxt->get_state (stmt, arg);
1686 if (unchecked_p (state))
1687 {
1688 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1689 sm_ctxt->warn (node, stmt, arg,
1690 new possible_null_deref (*this, diag_arg));
1691 const allocation_state *astate = as_a_allocation_state (state);
1692 sm_ctxt->set_next_state (stmt, arg, astate->get_nonnull ());
1693 }
1694 else if (state == m_null)
1695 {
1696 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1697 sm_ctxt->warn (node, stmt, arg,
1698 new null_deref (*this, diag_arg));
1699 sm_ctxt->set_next_state (stmt, arg, m_stop);
1700 }
1701 else if (freed_p (state))
1702 {
1703 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1704 const allocation_state *astate = as_a_allocation_state (state);
1705 sm_ctxt->warn (node, stmt, arg,
1706 new use_after_free (*this, diag_arg,
1707 astate->m_deallocator));
1708 sm_ctxt->set_next_state (stmt, arg, m_stop);
1709 }
1710 }
1711 }
1712 return false;
1713 }
1714
1715 /* Handle a call to an allocator.
1716 RETURNS_NONNULL is true if CALL is to a fndecl known to have
1717 __attribute__((returns_nonnull)). */
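/* For example (an illustrative declaration, not from this file):

     __attribute__ ((returns_nonnull, malloc (release_buf)))
     void *acquire_buf (size_t);

   lets the returned pointer skip "unchecked" and start in "nonnull".  */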
1718
1719 void
1720 malloc_state_machine::on_allocator_call (sm_context *sm_ctxt,
1721 const gcall *call,
1722 const deallocator_set *deallocators,
1723 bool returns_nonnull) const
1724 {
1725 tree lhs = gimple_call_lhs (call);
1726 if (lhs)
1727 {
1728 if (sm_ctxt->get_state (call, lhs) == m_start)
1729 sm_ctxt->set_next_state (call, lhs,
1730 (returns_nonnull
1731 ? deallocators->m_nonnull
1732 : deallocators->m_unchecked));
1733 }
1734 else
1735 {
1736 /* TODO: report leak. */
1737 }
1738 }
1739
1740 void
1741 malloc_state_machine::on_deallocator_call (sm_context *sm_ctxt,
1742 const supernode *node,
1743 const gcall *call,
1744 const deallocator *d,
1745 unsigned argno) const
1746 {
1747 if (argno >= gimple_call_num_args (call))
1748 return;
1749 tree arg = gimple_call_arg (call, argno);
1750
1751 state_t state = sm_ctxt->get_state (call, arg);
1752
1753 /* start/unchecked/nonnull -> freed. */
1754 if (state == m_start)
1755 sm_ctxt->set_next_state (call, arg, d->m_freed);
1756 else if (unchecked_p (state) || nonnull_p (state))
1757 {
1758 const allocation_state *astate = as_a_allocation_state (state);
1759 gcc_assert (astate->m_deallocators);
1760 if (!astate->m_deallocators->contains_p (d))
1761 {
1762 /* Wrong allocator. */
1763 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1764 pending_diagnostic *pd
1765 = new mismatching_deallocation (*this, diag_arg,
1766 astate->m_deallocators,
1767 d);
1768 sm_ctxt->warn (node, call, arg, pd);
1769 }
1770 sm_ctxt->set_next_state (call, arg, d->m_freed);
1771 }
1772
1773 /* Keep state "null" as-is, rather than transitioning to "freed";
1774 we don't want to complain about double-free of NULL. */
1775 else if (state == d->m_freed)
1776 {
1777 /* freed -> stop, with warning. */
1778 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1779 sm_ctxt->warn (node, call, arg,
1780 new double_free (*this, diag_arg, d->m_name));
1781 sm_ctxt->set_next_state (call, arg, m_stop);
1782 }
1783 else if (state == m_non_heap)
1784 {
1785 /* non-heap -> stop, with warning. */
1786 tree diag_arg = sm_ctxt->get_diagnostic_tree (arg);
1787 sm_ctxt->warn (node, call, arg,
1788 new free_of_non_heap (*this, diag_arg,
1789 d->m_name));
1790 sm_ctxt->set_next_state (call, arg, m_stop);
1791 }
1792 }
1793
1794 /* Implementation of realloc(3):
1795
1796 void *realloc(void *ptr, size_t size);
1797
1798 realloc(3) is awkward.
1799
1800 We currently don't have a way to express multiple possible outcomes
1801 from a function call, "bifurcating" the state such as:
1802 - success: non-NULL is returned
1803 - failure: NULL is returned, existing buffer is not freed.
1804 or even an N-way state split e.g.:
1805 - buffer grew successfully in-place
1806 - buffer was successfully moved to a larger allocation
1807 - buffer was successfully contracted
1808 - realloc failed, returning NULL, without freeing existing buffer.
1809 (PR analyzer/99260 tracks this)
1810
1811 Given that we can currently only express one outcome, eliminate
1812 false positives by dropping state from the buffer. */
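/* For example (an illustrative caller), if PTR were instead transitioned
   to "freed" here, this valid failure-handling idiom would be misreported
   as a double-free:

     void *p = malloc (n);
     void *q = realloc (p, 2 * n);
     if (!q)
       free (p);     (valid: on failure, realloc leaves P allocated)

   hence PTR is moved to the "stop" state below.  */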
1813
1814 void
1815 malloc_state_machine::on_realloc_call (sm_context *sm_ctxt,
1816 const supernode *node ATTRIBUTE_UNUSED,
1817 const gcall *call) const
1818 {
1819 tree ptr = gimple_call_arg (call, 0);
1820
1821 state_t state = sm_ctxt->get_state (call, ptr);
1822
1823 /* Detect mismatches. */
1824 if (unchecked_p (state) || nonnull_p (state))
1825 {
1826 const allocation_state *astate = as_a_allocation_state (state);
1827 gcc_assert (astate->m_deallocators);
1828 if (astate->m_deallocators != &m_free)
1829 {
1830 /* Wrong allocator. */
1831 tree diag_ptr = sm_ctxt->get_diagnostic_tree (ptr);
1832 pending_diagnostic *pd
1833 = new mismatching_deallocation (*this, diag_ptr,
1834 astate->m_deallocators,
1835 &m_realloc);
1836 sm_ctxt->warn (node, call, ptr, pd);
1837 }
1838 }
1839
1840 /* Transition ptr to "stop" state. */
1841 sm_ctxt->set_next_state (call, ptr, m_stop);
1842 }
1843
1844 /* Implementation of state_machine::on_phi vfunc for malloc_state_machine. */
1845
1846 void
1847 malloc_state_machine::on_phi (sm_context *sm_ctxt,
1848 const supernode *node ATTRIBUTE_UNUSED,
1849 const gphi *phi,
1850 tree rhs) const
1851 {
1852 if (zerop (rhs))
1853 {
1854 tree lhs = gimple_phi_result (phi);
1855 on_zero_assignment (sm_ctxt, phi, lhs);
1856 }
1857 }
1858
1859 /* Implementation of state_machine::on_condition vfunc for malloc_state_machine.
1860 Potentially transition state 'unchecked' to 'nonnull' or to 'null'. */
1861
1862 void
1863 malloc_state_machine::on_condition (sm_context *sm_ctxt,
1864 const supernode *node ATTRIBUTE_UNUSED,
1865 const gimple *stmt,
1866 const svalue *lhs,
1867 enum tree_code op,
1868 const svalue *rhs) const
1869 {
1870 if (!rhs->all_zeroes_p ())
1871 return;
1872
1873 if (!any_pointer_p (lhs))
1874 return;
1875 if (!any_pointer_p (rhs))
1876 return;
1877
1878 if (op == NE_EXPR)
1879 {
1880 log ("got 'ARG != 0' match");
1881 state_t s = sm_ctxt->get_state (stmt, lhs);
1882 if (unchecked_p (s))
1883 {
1884 const allocation_state *astate = as_a_allocation_state (s);
1885 sm_ctxt->set_next_state (stmt, lhs, astate->get_nonnull ());
1886 }
1887 }
1888 else if (op == EQ_EXPR)
1889 {
1890 log ("got 'ARG == 0' match");
1891 state_t s = sm_ctxt->get_state (stmt, lhs);
1892 if (unchecked_p (s))
1893 sm_ctxt->set_next_state (stmt, lhs, m_null);
1894 }
1895 }
1896
1897 /* Implementation of state_machine::can_purge_p vfunc for malloc_state_machine.
1898 Don't allow purging of pointers in state 'unchecked' or 'nonnull'
1899 (to avoid false leak reports). */
1900
1901 bool
1902 malloc_state_machine::can_purge_p (state_t s) const
1903 {
1904 enum resource_state rs = get_rs (s);
1905 return rs != RS_UNCHECKED && rs != RS_NONNULL;
1906 }
1907
1908 /* Implementation of state_machine::on_leak vfunc for malloc_state_machine
1909 (for complaining about leaks of pointers in state 'unchecked' and
1910 'nonnull'). */
1911
1912 pending_diagnostic *
1913 malloc_state_machine::on_leak (tree var) const
1914 {
1915 return new malloc_leak (*this, var);
1916 }
1917
1918 /* Implementation of state_machine::reset_when_passed_to_unknown_fn_p vfunc
1919 for malloc_state_machine. */
1920
1921 bool
1922 malloc_state_machine::reset_when_passed_to_unknown_fn_p (state_t s,
1923 bool is_mutable) const
1924 {
1925 /* An on-stack ptr doesn't stop being stack-allocated when passed to an
1926 unknown fn. */
1927 if (s == m_non_heap)
1928 return false;
1929
1930 /* Otherwise, pointers passed as non-const can be freed. */
1931 return is_mutable;
1932 }
1933
1934 /* Return true if calls to FNDECL are known to not affect this sm-state. */
1935
1936 bool
1937 malloc_state_machine::unaffected_by_call_p (tree fndecl)
1938 {
1939 /* A set of functions that are known to not affect allocation
1940 status, even if we haven't fully modelled the rest of their
1941 behavior yet. */
1942 static const char * const funcnames[] = {
1943 /* This array must be kept sorted. */
1944 "strsep",
1945 };
1946 const size_t count
1947 = sizeof(funcnames) / sizeof (funcnames[0]);
1948 function_set fs (funcnames, count);
1949
1950 if (fs.contains_decl_p (fndecl))
1951 return true;
1952
1953 return false;
1954 }
1955
1956 /* Shared logic for handling GIMPLE_ASSIGNs and GIMPLE_PHIs that
1957 assign zero to LHS. */
1958
1959 void
1960 malloc_state_machine::on_zero_assignment (sm_context *sm_ctxt,
1961 const gimple *stmt,
1962 tree lhs) const
1963 {
1964 state_t s = sm_ctxt->get_state (stmt, lhs);
1965 enum resource_state rs = get_rs (s);
1966 if (rs == RS_START
1967 || rs == RS_UNCHECKED
1968 || rs == RS_NONNULL
1969 || rs == RS_FREED)
1970 sm_ctxt->set_next_state (stmt, lhs, m_null);
1971 }
1972
1973 } // anonymous namespace
1974
1975 /* Internal interface to this file. */
1976
1977 state_machine *
1978 make_malloc_state_machine (logger *logger)
1979 {
1980 return new malloc_state_machine (logger);
1981 }
1982
1983 } // namespace ana
1984
1985 #endif /* #if ENABLE_ANALYZER */