]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/analyzer/bounds-checking.cc
Update copyright years.
[thirdparty/gcc.git] / gcc / analyzer / bounds-checking.cc
1 /* Bounds-checking of reads and writes to memory regions.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10
11 GCC is distributed in the hope that it will be useful, but
12 WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #define INCLUDE_MEMORY
22 #include "system.h"
23 #include "coretypes.h"
24 #include "make-unique.h"
25 #include "tree.h"
26 #include "function.h"
27 #include "basic-block.h"
28 #include "gimple.h"
29 #include "gimple-iterator.h"
30 #include "diagnostic-core.h"
31 #include "diagnostic-metadata.h"
32 #include "analyzer/analyzer.h"
33 #include "analyzer/analyzer-logging.h"
34 #include "analyzer/region-model.h"
35 #include "analyzer/checker-event.h"
36 #include "analyzer/checker-path.h"
37
38 #if ENABLE_ANALYZER
39
40 namespace ana {
41
42 /* Abstract base class for all out-of-bounds warnings. */
43
class out_of_bounds : public pending_diagnostic
{
public:
  /* REG is the region accessed out-of-bounds; DIAG_ARG is a
     representative tree for use in messages (may be NULL_TREE).  */
  out_of_bounds (const region *reg, tree diag_arg)
  : m_reg (reg), m_diag_arg (diag_arg)
  {}

  /* Deduplication: two out-of-bounds diagnostics are equal if they
     refer to the same region and the same representative tree.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    const out_of_bounds &other
      (static_cast <const out_of_bounds &>(base_other));
    return (m_reg == other.m_reg
	    && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg));
  }

  /* All out-of-bounds warnings are controlled by the same option.  */
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  /* Highlight the creation of the accessed region in the path.  */
  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

  void add_region_creation_events (const region *,
				   tree capacity,
				   const event_loc_info &loc_info,
				   checker_path &emission_path) override
  {
    /* The memory space is described in the diagnostic message itself,
       so we don't need an event for that.  */
    if (capacity)
      emission_path.add_event
	(make_unique<region_creation_event_capacity> (capacity, loc_info));
  }

protected:
  /* Memory space (stack, heap, ...) of the accessed region; used by
     subclasses to pick the message wording and CWE.  */
  enum memory_space get_memory_space () const
  {
    return m_reg->get_memory_space ();
  }

  /* Potentially add a note about valid ways to index this array, such
     as (given "int arr[10];"):
       note: valid subscripts for 'arr' are '[0]' to '[9]'
     We print the '[' and ']' characters so as to express the valid
     subscripts using C syntax, rather than just as byte ranges,
     which hopefully is more clear to the user.  */
  void
  maybe_describe_array_bounds (location_t loc) const
  {
    if (!m_diag_arg)
      return;
    tree t = TREE_TYPE (m_diag_arg);
    if (!t)
      return;
    if (TREE_CODE (t) != ARRAY_TYPE)
      return;
    tree domain = TYPE_DOMAIN (t);
    if (!domain)
      return;
    tree max_idx = TYPE_MAX_VALUE (domain);
    if (!max_idx)
      return;
    tree min_idx = TYPE_MIN_VALUE (domain);
    inform (loc,
	    "valid subscripts for %qE are %<[%E]%> to %<[%E]%>",
	    m_diag_arg, min_idx, max_idx);
  }

  /* The region that was accessed out-of-bounds.  */
  const region *m_reg;
  /* Representative tree for diagnostics, or NULL_TREE.  */
  tree m_diag_arg;
};
118
119 /* Abstract base class for all out-of-bounds warnings where the
120 out-of-bounds range is concrete. */
121
class concrete_out_of_bounds : public out_of_bounds
{
public:
  /* OUT_OF_BOUNDS_RANGE is the concrete byte range that lies outside
     the valid bounds of the buffer.  */
  concrete_out_of_bounds (const region *reg, tree diag_arg,
			  byte_range out_of_bounds_range)
  : out_of_bounds (reg, diag_arg),
    m_out_of_bounds_range (out_of_bounds_range)
  {}

  /* Deduplication: additionally require the same out-of-bounds range.  */
  bool subclass_equal_p (const pending_diagnostic &base_other) const override
  {
    const concrete_out_of_bounds &other
      (static_cast <const concrete_out_of_bounds &>(base_other));
    return (out_of_bounds::subclass_equal_p (other)
	    && m_out_of_bounds_range == other.m_out_of_bounds_range);
  }

protected:
  /* The concrete byte range that falls outside the buffer.  */
  byte_range m_out_of_bounds_range;
};
142
/* Abstract subclass to complain about concrete out-of-bounds
   accesses past the end of the buffer. */
145
class concrete_past_the_end : public concrete_out_of_bounds
{
public:
  /* BYTE_BOUND is a tree giving the first invalid byte offset,
     i.e. the size of the buffer.  */
  concrete_past_the_end (const region *reg, tree diag_arg, byte_range range,
			 tree byte_bound)
  : concrete_out_of_bounds (reg, diag_arg, range), m_byte_bound (byte_bound)
  {}

  /* Deduplication: additionally require the same byte bound.  */
  bool
  subclass_equal_p (const pending_diagnostic &base_other) const final override
  {
    const concrete_past_the_end &other
      (static_cast <const concrete_past_the_end &>(base_other));
    return (concrete_out_of_bounds::subclass_equal_p (other)
	    && pending_diagnostic::same_tree_p (m_byte_bound,
						other.m_byte_bound));
  }

  /* Use the known byte bound (rather than the generic capacity) for
     the "capacity is ..." event, when it is a constant.  */
  void add_region_creation_events (const region *,
				   tree,
				   const event_loc_info &loc_info,
				   checker_path &emission_path) final override
  {
    if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
      emission_path.add_event
	(make_unique<region_creation_event_capacity> (m_byte_bound, loc_info));
  }

protected:
  /* First byte offset past the end of the buffer, or NULL_TREE.  */
  tree m_byte_bound;
};
177
178 /* Concrete subclass to complain about buffer overflows. */
179
class concrete_buffer_overflow : public concrete_past_the_end
{
public:
  concrete_buffer_overflow (const region *reg, tree diag_arg,
			    byte_range range, tree byte_bound)
  : concrete_past_the_end (reg, diag_arg, range, byte_bound)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_overflow";
  }

  /* Emit the warning, tailoring wording and CWE to the memory space:
       default: CWE-787 "out-of-bounds write"
       stack:   CWE-121 "stack-based buffer overflow"
       heap:    CWE-122 "heap-based buffer overflow"
     If the warning was emitted, add a note with the number of bytes
     written past the end (when that count fits in an unsigned HWI)
     and, if applicable, a note on the valid array subscripts.  */
  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned;
    switch (get_memory_space ())
      {
      default:
	m.add_cwe (787);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "buffer overflow");
	break;
      case MEMSPACE_STACK:
	m.add_cwe (121);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "stack-based buffer overflow");
	break;
      case MEMSPACE_HEAP:
	m.add_cwe (122);
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "heap-based buffer overflow");
	break;
      }

    if (warned)
      {
	if (wi::fits_uhwi_p (m_out_of_bounds_range.m_size_in_bytes))
	  {
	    unsigned HOST_WIDE_INT num_bad_bytes
	      = m_out_of_bounds_range.m_size_in_bytes.to_uhwi ();
	    /* Use inform_n to get singular/plural wording right.  */
	    if (m_diag_arg)
	      inform_n (rich_loc->get_loc (),
			num_bad_bytes,
			"write of %wu byte to beyond the end of %qE",
			"write of %wu bytes to beyond the end of %qE",
			num_bad_bytes,
			m_diag_arg);
	    else
	      inform_n (rich_loc->get_loc (),
			num_bad_bytes,
			"write of %wu byte to beyond the end of the region",
			"write of %wu bytes to beyond the end of the region",
			num_bad_bytes);
	  }
	else if (m_diag_arg)
	  inform (rich_loc->get_loc (),
		  "write to beyond the end of %qE",
		  m_diag_arg);

	maybe_describe_array_bounds (rich_loc->get_loc ());
      }

    return warned;
  }

  /* Describe the final event, giving the bad byte offsets in decimal
     and the bound at which the buffer ends.  Distinguishes a
     single-byte access (start == last) from a multi-byte range.  */
  label_text describe_final_event (const evdesc::final_event &ev)
  final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds write at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds write from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds write from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }
};
280
281 /* Concrete subclass to complain about buffer over-reads. */
282
class concrete_buffer_over_read : public concrete_past_the_end
{
public:
  concrete_buffer_over_read (const region *reg, tree diag_arg,
			     byte_range range, tree byte_bound)
  : concrete_past_the_end (reg, diag_arg, range, byte_bound)
  {}

  const char *get_kind () const final override
  {
    return "concrete_buffer_over_read";
  }

  /* Emit the warning.  All variants are tagged CWE-126 (buffer
     over-read); only the wording varies with the memory space.
     If the warning was emitted, add a note with the number of bytes
     read past the end (when that count fits in an unsigned HWI) and,
     if applicable, a note on the valid array subscripts.  */
  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned;
    m.add_cwe (126);
    switch (get_memory_space ())
      {
      default:
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "buffer over-read");
	break;
      case MEMSPACE_STACK:
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "stack-based buffer over-read");
	break;
      case MEMSPACE_HEAP:
	warned = warning_meta (rich_loc, m, get_controlling_option (),
			       "heap-based buffer over-read");
	break;
      }

    if (warned)
      {
	if (wi::fits_uhwi_p (m_out_of_bounds_range.m_size_in_bytes))
	  {
	    unsigned HOST_WIDE_INT num_bad_bytes
	      = m_out_of_bounds_range.m_size_in_bytes.to_uhwi ();
	    /* Use inform_n to get singular/plural wording right.  */
	    if (m_diag_arg)
	      inform_n (rich_loc->get_loc (),
			num_bad_bytes,
			"read of %wu byte from after the end of %qE",
			"read of %wu bytes from after the end of %qE",
			num_bad_bytes,
			m_diag_arg);
	    else
	      inform_n (rich_loc->get_loc (),
			num_bad_bytes,
			"read of %wu byte from after the end of the region",
			"read of %wu bytes from after the end of the region",
			num_bad_bytes);
	  }
	else if (m_diag_arg)
	  inform (rich_loc->get_loc (),
		  "read from after the end of %qE",
		  m_diag_arg);

	maybe_describe_array_bounds (rich_loc->get_loc ());
      }

    return warned;
  }

  /* Describe the final event, giving the bad byte offsets in decimal
     and the bound at which the buffer ends.  Distinguishes a
     single-byte access (start == last) from a multi-byte range.  */
  label_text describe_final_event (const evdesc::final_event &ev)
  final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
				     " ends at byte %E", start_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds read at byte %s but region"
				   " ends at byte %E", start_buf,
				   m_byte_bound);
      }
    else
      {
	if (m_diag_arg)
	  return ev.formatted_print ("out-of-bounds read from byte %s till"
				     " byte %s but %qE ends at byte %E",
				     start_buf, end_buf, m_diag_arg,
				     m_byte_bound);
	return ev.formatted_print ("out-of-bounds read from byte %s till"
				   " byte %s but region ends at byte %E",
				   start_buf, end_buf, m_byte_bound);
      }
  }
};
381
382 /* Concrete subclass to complain about buffer underwrites. */
383
384 class concrete_buffer_underwrite : public concrete_out_of_bounds
385 {
386 public:
387 concrete_buffer_underwrite (const region *reg, tree diag_arg,
388 byte_range range)
389 : concrete_out_of_bounds (reg, diag_arg, range)
390 {}
391
392 const char *get_kind () const final override
393 {
394 return "concrete_buffer_underwrite";
395 }
396
397 bool emit (rich_location *rich_loc) final override
398 {
399 diagnostic_metadata m;
400 bool warned;
401 m.add_cwe (124);
402 switch (get_memory_space ())
403 {
404 default:
405 warned = warning_meta (rich_loc, m, get_controlling_option (),
406 "buffer underwrite");
407 break;
408 case MEMSPACE_STACK:
409 warned = warning_meta (rich_loc, m, get_controlling_option (),
410 "stack-based buffer underwrite");
411 break;
412 case MEMSPACE_HEAP:
413 warned = warning_meta (rich_loc, m, get_controlling_option (),
414 "heap-based buffer underwrite");
415 break;
416 }
417 if (warned)
418 maybe_describe_array_bounds (rich_loc->get_loc ());
419 return warned;
420 }
421
422 label_text describe_final_event (const evdesc::final_event &ev)
423 final override
424 {
425 byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
426 byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
427 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
428 print_dec (start, start_buf, SIGNED);
429 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
430 print_dec (end, end_buf, SIGNED);
431
432 if (start == end)
433 {
434 if (m_diag_arg)
435 return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
436 " starts at byte 0", start_buf,
437 m_diag_arg);
438 return ev.formatted_print ("out-of-bounds write at byte %s but region"
439 " starts at byte 0", start_buf);
440 }
441 else
442 {
443 if (m_diag_arg)
444 return ev.formatted_print ("out-of-bounds write from byte %s till"
445 " byte %s but %qE starts at byte 0",
446 start_buf, end_buf, m_diag_arg);
447 return ev.formatted_print ("out-of-bounds write from byte %s till"
448 " byte %s but region starts at byte 0",
449 start_buf, end_buf);;
450 }
451 }
452 };
453
454 /* Concrete subclass to complain about buffer under-reads. */
455
456 class concrete_buffer_under_read : public concrete_out_of_bounds
457 {
458 public:
459 concrete_buffer_under_read (const region *reg, tree diag_arg,
460 byte_range range)
461 : concrete_out_of_bounds (reg, diag_arg, range)
462 {}
463
464 const char *get_kind () const final override
465 {
466 return "concrete_buffer_under_read";
467 }
468
469 bool emit (rich_location *rich_loc) final override
470 {
471 diagnostic_metadata m;
472 bool warned;
473 m.add_cwe (127);
474 switch (get_memory_space ())
475 {
476 default:
477 warned = warning_meta (rich_loc, m, get_controlling_option (),
478 "buffer under-read");
479 break;
480 case MEMSPACE_STACK:
481 warned = warning_meta (rich_loc, m, get_controlling_option (),
482 "stack-based buffer under-read");
483 break;
484 case MEMSPACE_HEAP:
485 warned = warning_meta (rich_loc, m, get_controlling_option (),
486 "heap-based buffer under-read");
487 break;
488 }
489 if (warned)
490 maybe_describe_array_bounds (rich_loc->get_loc ());
491 return warned;
492 }
493
494 label_text describe_final_event (const evdesc::final_event &ev)
495 final override
496 {
497 byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
498 byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
499 char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
500 print_dec (start, start_buf, SIGNED);
501 char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
502 print_dec (end, end_buf, SIGNED);
503
504 if (start == end)
505 {
506 if (m_diag_arg)
507 return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
508 " starts at byte 0", start_buf,
509 m_diag_arg);
510 return ev.formatted_print ("out-of-bounds read at byte %s but region"
511 " starts at byte 0", start_buf);
512 }
513 else
514 {
515 if (m_diag_arg)
516 return ev.formatted_print ("out-of-bounds read from byte %s till"
517 " byte %s but %qE starts at byte 0",
518 start_buf, end_buf, m_diag_arg);
519 return ev.formatted_print ("out-of-bounds read from byte %s till"
520 " byte %s but region starts at byte 0",
521 start_buf, end_buf);;
522 }
523 }
524 };
525
526 /* Abstract class to complain about out-of-bounds read/writes where
527 the values are symbolic. */
528
class symbolic_past_the_end : public out_of_bounds
{
public:
  /* OFFSET, NUM_BYTES and CAPACITY are representative trees for the
     symbolic byte offset, access size and buffer capacity; any of
     them may be NULL_TREE when no representative tree is known.  */
  symbolic_past_the_end (const region *reg, tree diag_arg, tree offset,
			 tree num_bytes, tree capacity)
  : out_of_bounds (reg, diag_arg),
    m_offset (offset),
    m_num_bytes (num_bytes),
    m_capacity (capacity)
  {}

  /* Deduplication: additionally require matching offset, size and
     capacity trees.  */
  bool
  subclass_equal_p (const pending_diagnostic &base_other) const final override
  {
    const symbolic_past_the_end &other
      (static_cast <const symbolic_past_the_end &>(base_other));
    return (out_of_bounds::subclass_equal_p (other)
	    && pending_diagnostic::same_tree_p (m_offset, other.m_offset)
	    && pending_diagnostic::same_tree_p (m_num_bytes, other.m_num_bytes)
	    && pending_diagnostic::same_tree_p (m_capacity, other.m_capacity));
  }

protected:
  /* Byte offset of the access, or NULL_TREE if unknown.  */
  tree m_offset;
  /* Number of bytes accessed, or NULL_TREE if unknown.  */
  tree m_num_bytes;
  /* Capacity of the buffer, or NULL_TREE if unknown.  */
  tree m_capacity;
};
556
557 /* Concrete subclass to complain about overflows with symbolic values. */
558
class symbolic_buffer_overflow : public symbolic_past_the_end
{
public:
  symbolic_buffer_overflow (const region *reg, tree diag_arg, tree offset,
			    tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
  }

  const char *get_kind () const final override
  {
    return "symbolic_buffer_overflow";
  }

  /* Emit the warning, tailoring wording and CWE to the memory space:
       default: CWE-787 "out-of-bounds write"
       stack:   CWE-121 "stack-based buffer overflow"
       heap:    CWE-122 "heap-based buffer overflow"  */
  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    switch (get_memory_space ())
      {
      default:
	m.add_cwe (787);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "buffer overflow");
      case MEMSPACE_STACK:
	m.add_cwe (121);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "stack-based buffer overflow");
      case MEMSPACE_HEAP:
	m.add_cwe (122);
	return warning_meta (rich_loc, m, get_controlling_option (),
			     "heap-based buffer overflow");
      }
  }

  /* Describe the final event, picking the most precise wording the
     known trees allow: offset and size both known (constant size gets
     singular/plural handling), offset known but size symbolic or
     unknown, or nothing known beyond "out-of-bounds write".  */
  label_text
  describe_final_event (const evdesc::final_event &ev) final override
  {
    if (m_offset)
      {
	/* Known offset.  */
	if (m_num_bytes)
	  {
	    /* Known offset, known size.  */
	    if (TREE_CODE (m_num_bytes) == INTEGER_CST)
	      {
		/* Known offset, known constant size.  */
		if (pending_diagnostic::same_tree_p (m_num_bytes,
						     integer_one_node))
		  {
		    /* Singular m_num_bytes.  */
		    if (m_diag_arg)
		      return ev.formatted_print
			("write of %E byte at offset %qE exceeds %qE",
			 m_num_bytes, m_offset, m_diag_arg);
		    else
		      return ev.formatted_print
			("write of %E byte at offset %qE exceeds the buffer",
			 m_num_bytes, m_offset);
		  }
		else
		  {
		    /* Plural m_num_bytes.  */
		    if (m_diag_arg)
		      return ev.formatted_print
			("write of %E bytes at offset %qE exceeds %qE",
			 m_num_bytes, m_offset, m_diag_arg);
		    else
		      return ev.formatted_print
			("write of %E bytes at offset %qE exceeds the buffer",
			 m_num_bytes, m_offset);
		  }
	      }
	    else
	      {
		/* Known offset, known symbolic size.  */
		if (m_diag_arg)
		  return ev.formatted_print
		    ("write of %qE bytes at offset %qE exceeds %qE",
		     m_num_bytes, m_offset, m_diag_arg);
		else
		  return ev.formatted_print
		    ("write of %qE bytes at offset %qE exceeds the buffer",
		     m_num_bytes, m_offset);
	      }
	  }
	else
	  {
	    /* Known offset, unknown size.  */
	    if (m_diag_arg)
	      return ev.formatted_print ("write at offset %qE exceeds %qE",
					 m_offset, m_diag_arg);
	    else
	      return ev.formatted_print ("write at offset %qE exceeds the"
					 " buffer", m_offset);
	  }
      }
    /* Unknown offset.  */
    if (m_diag_arg)
      return ev.formatted_print ("out-of-bounds write on %qE",
				 m_diag_arg);
    return ev.formatted_print ("out-of-bounds write");
  }
};
662
663 /* Concrete subclass to complain about over-reads with symbolic values. */
664
665 class symbolic_buffer_over_read : public symbolic_past_the_end
666 {
667 public:
668 symbolic_buffer_over_read (const region *reg, tree diag_arg, tree offset,
669 tree num_bytes, tree capacity)
670 : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
671 {
672 }
673
674 const char *get_kind () const final override
675 {
676 return "symbolic_buffer_over_read";
677 }
678
679 bool emit (rich_location *rich_loc) final override
680 {
681 diagnostic_metadata m;
682 m.add_cwe (126);
683 switch (get_memory_space ())
684 {
685 default:
686 m.add_cwe (787);
687 return warning_meta (rich_loc, m, get_controlling_option (),
688 "buffer over-read");
689 case MEMSPACE_STACK:
690 m.add_cwe (121);
691 return warning_meta (rich_loc, m, get_controlling_option (),
692 "stack-based buffer over-read");
693 case MEMSPACE_HEAP:
694 m.add_cwe (122);
695 return warning_meta (rich_loc, m, get_controlling_option (),
696 "heap-based buffer over-read");
697 }
698 }
699
700 label_text
701 describe_final_event (const evdesc::final_event &ev) final override
702 {
703 if (m_offset)
704 {
705 /* Known offset. */
706 if (m_num_bytes)
707 {
708 /* Known offset, known size. */
709 if (TREE_CODE (m_num_bytes) == INTEGER_CST)
710 {
711 /* Known offset, known constant size. */
712 if (pending_diagnostic::same_tree_p (m_num_bytes,
713 integer_one_node))
714 {
715 /* Singular m_num_bytes. */
716 if (m_diag_arg)
717 return ev.formatted_print
718 ("read of %E byte at offset %qE exceeds %qE",
719 m_num_bytes, m_offset, m_diag_arg);
720 else
721 return ev.formatted_print
722 ("read of %E byte at offset %qE exceeds the buffer",
723 m_num_bytes, m_offset);
724 }
725 else
726 {
727 /* Plural m_num_bytes. */
728 if (m_diag_arg)
729 return ev.formatted_print
730 ("read of %E bytes at offset %qE exceeds %qE",
731 m_num_bytes, m_offset, m_diag_arg);
732 else
733 return ev.formatted_print
734 ("read of %E bytes at offset %qE exceeds the buffer",
735 m_num_bytes, m_offset);
736 }
737 }
738 else
739 {
740 /* Known offset, known symbolic size. */
741 if (m_diag_arg)
742 return ev.formatted_print
743 ("read of %qE bytes at offset %qE exceeds %qE",
744 m_num_bytes, m_offset, m_diag_arg);
745 else
746 return ev.formatted_print
747 ("read of %qE bytes at offset %qE exceeds the buffer",
748 m_num_bytes, m_offset);
749 }
750 }
751 else
752 {
753 /* Known offset, unknown size. */
754 if (m_diag_arg)
755 return ev.formatted_print ("read at offset %qE exceeds %qE",
756 m_offset, m_diag_arg);
757 else
758 return ev.formatted_print ("read at offset %qE exceeds the"
759 " buffer", m_offset);
760 }
761 }
762 /* Unknown offset. */
763 if (m_diag_arg)
764 return ev.formatted_print ("out-of-bounds read on %qE",
765 m_diag_arg);
766 return ev.formatted_print ("out-of-bounds read");
767 }
768 };
769
770 /* Check whether an access is past the end of the BASE_REG. */
771
/* Complain (via CTXT) if the access described by SYM_BYTE_OFFSET and
   NUM_BYTES_SVAL provably extends beyond CAPACITY bytes of BASE_REG.
   DIR selects between an over-read and an overflow diagnostic.  Only
   warns when the constraint manager can prove
     offset + num_bytes > capacity;
   an unknown relation leads to no warning.  */

void
region_model::check_symbolic_bounds (const region *base_reg,
				     const svalue *sym_byte_offset,
				     const svalue *num_bytes_sval,
				     const svalue *capacity,
				     enum access_direction dir,
				     region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  /* First byte just past the access: offset + num_bytes.  */
  const svalue *next_byte
    = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
				  sym_byte_offset, num_bytes_sval);

  if (eval_condition (next_byte, GT_EXPR, capacity).is_true ())
    {
      /* Get representative trees purely for diagnostic text; any may
	 be NULL_TREE.  */
      tree diag_arg = get_representative_tree (base_reg);
      tree offset_tree = get_representative_tree (sym_byte_offset);
      tree num_bytes_tree = get_representative_tree (num_bytes_sval);
      tree capacity_tree = get_representative_tree (capacity);
      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<symbolic_buffer_over_read> (base_reg,
							      diag_arg,
							      offset_tree,
							      num_bytes_tree,
							      capacity_tree));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<symbolic_buffer_overflow> (base_reg,
							     diag_arg,
							     offset_tree,
							     num_bytes_tree,
							     capacity_tree));
	  break;
	}
    }
}
814
/* If SVAL is a constant INTEGER_CST, return that tree;
   otherwise return NULL_TREE.  */

static tree
maybe_get_integer_cst_tree (const svalue *sval)
{
  tree cst_tree = sval->maybe_get_constant ();
  if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
    return cst_tree;

  return NULL_TREE;
}
824
825 /* May complain when the access on REG is out-of-bounds. */
826
/* May complain when the access on REG is out-of-bounds.
   DIR selects read vs write diagnostics; warnings go via CTXT.
   Handles both concrete offsets/sizes (underwrite/under-read and
   overflow/over-read checks) and symbolic ones (delegated to
   check_symbolic_bounds).  */

void
region_model::check_region_bounds (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  /* Get the offset.  */
  region_offset reg_offset = reg->get_offset (m_mgr);
  const region *base_reg = reg_offset.get_base_region ();

  /* Bail out on symbolic regions.
     (e.g. because the analyzer did not see previous offsets on the latter,
     it might think that a negative access is before the buffer).  */
  if (base_reg->symbolic_p ())
    return;

  /* Find out how many bytes were accessed.  */
  const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
  tree num_bytes_tree = maybe_get_integer_cst_tree (num_bytes_sval);
  /* Bail out if 0 bytes are accessed.  */
  if (num_bytes_tree && zerop (num_bytes_tree))
    return;

  /* Get the capacity of the buffer.  */
  const svalue *capacity = get_capacity (base_reg);
  tree cst_capacity_tree = maybe_get_integer_cst_tree (capacity);

  /* The constant offset from a pointer is represented internally as a sizetype
     but should be interpreted as a signed value here.  The statement below
     converts the offset from bits to bytes and then to a signed integer with
     the same precision the sizetype has on the target system.

     For example, this is needed for out-of-bounds-3.c test1 to pass when
     compiled with a 64-bit gcc build targeting 32-bit systems.  */
  byte_offset_t offset;
  if (!reg_offset.symbolic_p ())
    offset = wi::sext (reg_offset.get_bit_offset () >> LOG2_BITS_PER_UNIT,
		       TYPE_PRECISION (size_type_node));

  /* If either the offset or the number of bytes accessed are symbolic,
     we have to reason about symbolic values.  */
  if (reg_offset.symbolic_p () || !num_bytes_tree)
    {
      const svalue* byte_offset_sval;
      if (!reg_offset.symbolic_p ())
	{
	  /* Wrap the concrete offset as a constant svalue so the
	     symbolic check can handle both operands uniformly.  */
	  tree offset_tree = wide_int_to_tree (integer_type_node, offset);
	  byte_offset_sval
	    = m_mgr->get_or_create_constant_svalue (offset_tree);
	}
      else
	byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
      check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
			     capacity, dir, ctxt);
      return;
    }

  /* Otherwise continue to check with concrete values.  */
  byte_range out (0, 0);
  /* NUM_BYTES_TREE should always be interpreted as unsigned.  */
  byte_offset_t num_bytes_unsigned = wi::to_offset (num_bytes_tree);
  byte_range read_bytes (offset, num_bytes_unsigned);
  /* If read_bytes has a subset < 0, we do have an underwrite.  */
  if (read_bytes.falls_short_of_p (0, &out))
    {
      tree diag_arg = get_representative_tree (base_reg);
      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<concrete_buffer_under_read> (reg, diag_arg,
							       out));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<concrete_buffer_underwrite> (reg, diag_arg,
							       out));
	  break;
	}
    }

  /* For accesses past the end, we do need a concrete capacity.  No need to
     do a symbolic check here because the inequality check does not reason
     whether constants are greater than symbolic values.  */
  if (!cst_capacity_tree)
    return;

  byte_range buffer (0, wi::to_offset (cst_capacity_tree));
  /* If READ_BYTES exceeds BUFFER, we do have an overflow.  */
  if (read_bytes.exceeds_p (buffer, &out))
    {
      tree byte_bound = wide_int_to_tree (size_type_node,
					  buffer.get_next_byte_offset ());
      tree diag_arg = get_representative_tree (base_reg);

      switch (dir)
	{
	default:
	  gcc_unreachable ();
	  break;
	case DIR_READ:
	  ctxt->warn (make_unique<concrete_buffer_over_read> (reg, diag_arg,
							      out, byte_bound));
	  break;
	case DIR_WRITE:
	  ctxt->warn (make_unique<concrete_buffer_overflow> (reg, diag_arg,
							     out, byte_bound));
	  break;
	}
    }
}
940
941 } // namespace ana
942
943 #endif /* #if ENABLE_ANALYZER */