]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/analyzer/region.cc
analyzer: fix missing check for uninit of return values
[thirdparty/gcc.git] / gcc / analyzer / region.cc
1 /* Regions of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "diagnostic-core.h"
26 #include "gimple-pretty-print.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
43 #include "tristate.h"
44 #include "bitmap.h"
45 #include "selftest.h"
46 #include "function.h"
47 #include "json.h"
48 #include "analyzer/analyzer.h"
49 #include "analyzer/analyzer-logging.h"
50 #include "ordered-hash-map.h"
51 #include "options.h"
52 #include "cgraph.h"
53 #include "cfg.h"
54 #include "digraph.h"
55 #include "analyzer/supergraph.h"
56 #include "sbitmap.h"
57 #include "analyzer/call-string.h"
58 #include "analyzer/program-point.h"
59 #include "analyzer/store.h"
60 #include "analyzer/region.h"
61 #include "analyzer/region-model.h"
62
63 #if ENABLE_ANALYZER
64
65 namespace ana {
66
67 /* class region and its various subclasses. */
68
69 /* class region. */
70
/* region's dtor: release the lazily-allocated cached offset
   (created on the first call to get_offset).  */

region::~region ()
{
  delete m_cached_offset;
}
75
76 /* Compare REG1 and REG2 by id. */
77
78 int
79 region::cmp_ids (const region *reg1, const region *reg2)
80 {
81 return (long)reg1->get_id () - (long)reg2->get_id ();
82 }
83
84 /* Determine the base region for this region: when considering bindings
85 for this region, the base region is the ancestor which identifies
86 which cluster they should be partitioned into.
87 Regions within the same struct/union/array are in the same cluster.
88 Different decls are in different clusters. */
89
const region *
region::get_base_region () const
{
  const region *iter = this;
  while (iter)
    {
      switch (iter->get_kind ())
	{
	case RK_FIELD:
	case RK_ELEMENT:
	case RK_OFFSET:
	case RK_SIZED:
	case RK_BIT_RANGE:
	  /* These kinds describe a part of some larger region;
	     keep walking up towards the base.  */
	  iter = iter->get_parent_region ();
	  continue;
	case RK_CAST:
	  /* A cast region is a view of its original region's storage,
	     so follow the original rather than the parent.  */
	  iter = iter->dyn_cast_cast_region ()->get_original_region ();
	  continue;
	default:
	  /* Anything else is itself a base region.  */
	  return iter;
	}
    }
  /* Only reachable if a parent/original chain hits NULL.  */
  return iter;
}
114
115 /* Return true if get_base_region() == this for this region. */
116
117 bool
118 region::base_region_p () const
119 {
120 switch (get_kind ())
121 {
122 /* Region kinds representing a descendent of a base region. */
123 case RK_FIELD:
124 case RK_ELEMENT:
125 case RK_OFFSET:
126 case RK_SIZED:
127 case RK_CAST:
128 case RK_BIT_RANGE:
129 return false;
130
131 default:
132 return true;
133 }
134 }
135
136 /* Return true if this region is ELDER or one of its descendents. */
137
138 bool
139 region::descendent_of_p (const region *elder) const
140 {
141 const region *iter = this;
142 while (iter)
143 {
144 if (iter == elder)
145 return true;
146 if (iter->get_kind () == RK_CAST)
147 iter = iter->dyn_cast_cast_region ()->get_original_region ();
148 else
149 iter = iter->get_parent_region ();
150 }
151 return false;
152 }
153
154 /* If this region is a frame_region, or a descendent of one, return it.
155 Otherwise return NULL. */
156
157 const frame_region *
158 region::maybe_get_frame_region () const
159 {
160 const region *iter = this;
161 while (iter)
162 {
163 if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
164 return frame_reg;
165 if (iter->get_kind () == RK_CAST)
166 iter = iter->dyn_cast_cast_region ()->get_original_region ();
167 else
168 iter = iter->get_parent_region ();
169 }
170 return NULL;
171 }
172
173 /* Get the memory space of this region. */
174
enum memory_space
region::get_memory_space () const
{
  /* Walk up towards the root; the first ancestor with a known
     memory space determines the result.  */
  const region *iter = this;
  while (iter)
    {
      switch (iter->get_kind ())
	{
	default:
	  /* Not a space-defining kind; fall through to the
	     parent/original walk below.  */
	  break;
	case RK_GLOBALS:
	  return MEMSPACE_GLOBALS;
	case RK_CODE:
	case RK_FUNCTION:
	case RK_LABEL:
	  return MEMSPACE_CODE;
	case RK_FRAME:
	case RK_STACK:
	case RK_ALLOCA:
	  return MEMSPACE_STACK;
	case RK_HEAP:
	case RK_HEAP_ALLOCATED:
	  return MEMSPACE_HEAP;
	case RK_STRING:
	  return MEMSPACE_READONLY_DATA;
	}
      if (iter->get_kind () == RK_CAST)
	iter = iter->dyn_cast_cast_region ()->get_original_region ();
      else
	iter = iter->get_parent_region ();
    }
  return MEMSPACE_UNKNOWN;
}
208
209 /* Subroutine for use by region_model_manager::get_or_create_initial_value.
210 Return true if this region has an initial_svalue.
211 Return false if attempting to use INIT_VAL(this_region) should give
212 the "UNINITIALIZED" poison value. */
213
bool
region::can_have_initial_svalue_p () const
{
  const region *base_reg = get_base_region ();

  /* Check for memory spaces that are uninitialized by default.  */
  enum memory_space mem_space = base_reg->get_memory_space ();
  switch (mem_space)
    {
    default:
      gcc_unreachable ();
    case MEMSPACE_UNKNOWN:
    case MEMSPACE_CODE:
    case MEMSPACE_GLOBALS:
    case MEMSPACE_READONLY_DATA:
      /* Such regions have initial_svalues.  */
      return true;

    case MEMSPACE_HEAP:
      /* Heap allocations are uninitialized by default.  */
      return false;

    case MEMSPACE_STACK:
      if (tree decl = base_reg->maybe_get_decl ())
	{
	  /* See the assertion in frame_region::get_region_for_local for the
	     tree codes we need to handle here.  */
	  switch (TREE_CODE (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case PARM_DECL:
	      /* Parameters have initial values.  */
	      return true;

	    case VAR_DECL:
	    case RESULT_DECL:
	      /* Function locals don't have initial values.
		 RESULT_DECL is included here so that reading an
		 uninitialized return value is also flagged.  */
	      return false;

	    case SSA_NAME:
	      {
		tree ssa_name = decl;
		/* SSA names that are the default defn of a PARM_DECL
		   have initial_svalues; other SSA names don't.  */
		if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
		    && SSA_NAME_VAR (ssa_name)
		    && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
		  return true;
		else
		  return false;
	      }
	    }
	}

      /* If we have an on-stack region that isn't associated with a decl
	 or SSA name, then we have VLA/alloca, which is uninitialized.  */
      return false;
    }
}
275
276 /* If this region is a decl_region, return the decl.
277 Otherwise return NULL. */
278
279 tree
280 region::maybe_get_decl () const
281 {
282 if (const decl_region *decl_reg = dyn_cast_decl_region ())
283 return decl_reg->get_decl ();
284 return NULL_TREE;
285 }
286
287 /* Get the region_offset for this region (calculating it on the
288 first call and caching it internally). */
289
290 region_offset
291 region::get_offset () const
292 {
293 if(!m_cached_offset)
294 m_cached_offset = new region_offset (calc_offset ());
295 return *m_cached_offset;
296 }
297
298 /* Base class implementation of region::get_byte_size vfunc.
299 If the size of this region (in bytes) is known statically, write it to *OUT
300 and return true.
301 Otherwise return false. */
302
303 bool
304 region::get_byte_size (byte_size_t *out) const
305 {
306 tree type = get_type ();
307
308 /* Bail out e.g. for heap-allocated regions. */
309 if (!type)
310 return false;
311
312 HOST_WIDE_INT bytes = int_size_in_bytes (type);
313 if (bytes == -1)
314 return false;
315 *out = bytes;
316 return true;
317 }
318
319 /* Base implementation of region::get_byte_size_sval vfunc. */
320
321 const svalue *
322 region::get_byte_size_sval (region_model_manager *mgr) const
323 {
324 tree type = get_type ();
325
326 /* Bail out e.g. for heap-allocated regions. */
327 if (!type)
328 return mgr->get_or_create_unknown_svalue (size_type_node);
329
330 HOST_WIDE_INT bytes = int_size_in_bytes (type);
331 if (bytes == -1)
332 return mgr->get_or_create_unknown_svalue (size_type_node);
333
334 tree byte_size = size_in_bytes (type);
335 if (TREE_TYPE (byte_size) != size_type_node)
336 byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
337 return mgr->get_or_create_constant_svalue (byte_size);
338 }
339
340 /* Attempt to get the size of TYPE in bits.
341 If successful, return true and write the size to *OUT.
342 Otherwise return false. */
343
344 bool
345 int_size_in_bits (const_tree type, bit_size_t *out)
346 {
347 if (INTEGRAL_TYPE_P (type))
348 {
349 *out = TYPE_PRECISION (type);
350 return true;
351 }
352
353 tree sz = TYPE_SIZE (type);
354 if (sz && tree_fits_uhwi_p (sz))
355 {
356 *out = TREE_INT_CST_LOW (sz);
357 return true;
358 }
359 else
360 return false;
361 }
362
363 /* If the size of this region (in bits) is known statically, write it to *OUT
364 and return true.
365 Otherwise return false. */
366
367 bool
368 region::get_bit_size (bit_size_t *out) const
369 {
370 tree type = get_type ();
371
372 /* Bail out e.g. for heap-allocated regions. */
373 if (!type)
374 return false;
375
376 return int_size_in_bits (type, out);
377 }
378
379 /* Get the field within RECORD_TYPE at BIT_OFFSET. */
380
tree
get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
{
  gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
  /* Negative offsets cannot fall within any field.  */
  if (bit_offset < 0)
    return NULL;

  /* Find the first field that has an offset > BIT_OFFSET,
     then return the one preceding it.
     Skip other trees within the chain, such as FUNCTION_DECLs.  */
  tree last_field = NULL_TREE;
  for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
       iter = DECL_CHAIN (iter))
    {
      if (TREE_CODE (iter) == FIELD_DECL)
	{
	  int iter_field_offset = int_bit_position (iter);
	  if (bit_offset < iter_field_offset)
	    return last_field;
	  last_field = iter;
	}
    }
  /* BIT_OFFSET is at or beyond the last field's offset.  */
  return last_field;
}
405
406 /* Populate *OUT with descendent regions of type TYPE that match
407 RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region. */
408
void
region::get_subregions_for_binding (region_model_manager *mgr,
				    bit_offset_t relative_bit_offset,
				    bit_size_t size_in_bits,
				    tree type,
				    auto_vec <const region *> *out) const
{
  /* Without types on both sides we can't match anything.  */
  if (get_type () == NULL_TREE || type == NULL_TREE)
    return;
  /* Exact match: this region itself is the binding.  */
  if (relative_bit_offset == 0
      && types_compatible_p (get_type (), type))
    {
      out->safe_push (this);
      return;
    }
  /* Otherwise recurse into this region's children, adjusting the
     offset to be relative to each child.  */
  switch (TREE_CODE (get_type ()))
    {
    case ARRAY_TYPE:
      {
	tree element_type = TREE_TYPE (get_type ());
	HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
	/* Only recurse for known, positive element sizes.  */
	if (hwi_byte_size > 0)
	  {
	    HOST_WIDE_INT bits_per_element
	      = hwi_byte_size << LOG2_BITS_PER_UNIT;
	    HOST_WIDE_INT element_index
	      = (relative_bit_offset.to_shwi () / bits_per_element);
	    tree element_index_cst
	      = build_int_cst (integer_type_node, element_index);
	    HOST_WIDE_INT inner_bit_offset
	      = relative_bit_offset.to_shwi () % bits_per_element;
	    const region *subregion = mgr->get_element_region
	      (this, element_type,
	       mgr->get_or_create_constant_svalue (element_index_cst));
	    subregion->get_subregions_for_binding (mgr, inner_bit_offset,
						   size_in_bits, type, out);
	  }
      }
      break;
    case RECORD_TYPE:
      {
	/* The bit offset might be *within* one of the fields (such as
	   with nested structs).
	   So we want to find the enclosing field, adjust the offset,
	   and repeat.  */
	if (tree field = get_field_at_bit_offset (get_type (),
						  relative_bit_offset))
	  {
	    int field_bit_offset = int_bit_position (field);
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding
	      (mgr, relative_bit_offset - field_bit_offset,
	       size_in_bits, type, out);
	  }
      }
      break;
    case UNION_TYPE:
      {
	/* All members of a union share offset zero, so try each one
	   with the unadjusted offset.  */
	for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
	     field = DECL_CHAIN (field))
	  {
	    if (TREE_CODE (field) != FIELD_DECL)
	      continue;
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding (mgr,
						   relative_bit_offset,
						   size_in_bits,
						   type,
						   out);
	  }
      }
      break;
    default:
      /* Do nothing.  */
      break;
    }
}
486
487 /* Walk from this region up to the base region within its cluster, calculating
488 the offset relative to the base region, either as an offset in bits,
489 or a symbolic offset. */
490
region_offset
region::calc_offset () const
{
  const region *iter_region = this;
  bit_offset_t accum_bit_offset = 0;

  while (iter_region)
    {
      switch (iter_region->get_kind ())
	{
	case RK_FIELD:
	case RK_ELEMENT:
	case RK_OFFSET:
	case RK_BIT_RANGE:
	  {
	    bit_offset_t rel_bit_offset;
	    /* If any step in the chain has a non-constant offset, the
	       whole offset becomes symbolic, relative to the parent.  */
	    if (!iter_region->get_relative_concrete_offset (&rel_bit_offset))
	      return region_offset::make_symbolic
		(iter_region->get_parent_region ());
	    accum_bit_offset += rel_bit_offset;
	    iter_region = iter_region->get_parent_region ();
	  }
	  continue;

	case RK_SIZED:
	  /* A sized view starts at its parent's offset.  */
	  iter_region = iter_region->get_parent_region ();
	  continue;

	case RK_CAST:
	  {
	    /* A cast shares storage with the original region; keep
	       walking from there.  */
	    const cast_region *cast_reg
	      = as_a <const cast_region *> (iter_region);
	    iter_region = cast_reg->get_original_region ();
	  }
	  continue;

	default:
	  /* Reached the base region: report the accumulated offset.  */
	  return region_offset::make_concrete (iter_region, accum_bit_offset);
	}
    }
  return region_offset::make_concrete (iter_region, accum_bit_offset);
}
533
534 /* Base implementation of region::get_relative_concrete_offset vfunc. */
535
/* Base implementation: by default a region has no known concrete
   offset relative to its parent; subclasses with constant offsets
   (field/element/offset/bit-range regions) override this.  */

bool
region::get_relative_concrete_offset (bit_offset_t *) const
{
  return false;
}
541
542 /* Dump a description of this region to stderr. */
543
DEBUG_FUNCTION void
region::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Honor the global diagnostic colorization setting.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
  pp_flush (&pp);
}
555
556 /* Return a new json::string describing the region. */
557
json::value *
region::to_json () const
{
  /* Use the "simple" description as the JSON string payload.  */
  label_text desc = get_desc (true);
  json::value *reg_js = new json::string (desc.m_buffer);
  desc.maybe_free ();
  return reg_js;
}
566
567 /* Generate a description of this region. */
568
DEBUG_FUNCTION label_text
region::get_desc (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_to_pp (&pp, simple);
  /* Caller owns the returned text (label_text::take).  */
  return label_text::take (xstrdup (pp_formatted_text (&pp)));
}
577
578 /* Base implementation of region::accept vfunc.
579 Subclass implementations should chain up to this. */
580
void
region::accept (visitor *v) const
{
  v->visit_region (this);
  /* Visit ancestors too, so a visitor sees the full chain.  */
  if (m_parent)
    m_parent->accept (v);
}
588
589 /* Return true if this is a symbolic region for deferencing an
590 unknown ptr.
591 We shouldn't attempt to bind values for this region (but
592 can unbind values for other regions). */
593
594 bool
595 region::symbolic_for_unknown_ptr_p () const
596 {
597 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
598 if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
599 return true;
600 return false;
601 }
602
603 /* Return true if this is a region for a decl with name DECL_NAME.
604 Intended for use when debugging (for assertions and conditional
605 breakpoints). */
606
607 DEBUG_FUNCTION bool
608 region::is_named_decl_p (const char *decl_name) const
609 {
610 if (tree decl = maybe_get_decl ())
611 if (DECL_NAME (decl)
612 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
613 return true;
614 return false;
615 }
616
617 /* region's ctor. */
618
region::region (complexity c, unsigned id, const region *parent, tree type)
: m_complexity (c), m_id (id), m_parent (parent), m_type (type),
  m_cached_offset (NULL)
{
  /* TYPE may be NULL_TREE (e.g. for typeless regions), but if
     present it must be a type node.  */
  gcc_assert (type == NULL_TREE || TYPE_P (type));
}
625
626 /* Comparator for use by vec<const region *>::qsort,
627 using their IDs to order them. */
628
629 int
630 region::cmp_ptr_ptr (const void *p1, const void *p2)
631 {
632 const region * const *reg1 = (const region * const *)p1;
633 const region * const *reg2 = (const region * const *)p2;
634
635 return cmp_ids (*reg1, *reg2);
636 }
637
638 /* Determine if a pointer to this region must be non-NULL.
639
640 Generally, pointers to regions must be non-NULL, but pointers
641 to symbolic_regions might, in fact, be NULL.
642
643 This allows us to simulate functions like malloc and calloc with:
644 - only one "outcome" from each statement,
645 - the idea that the pointer is on the heap if non-NULL
646 - the possibility that the pointer could be NULL
647 - the idea that successive values returned from malloc are non-equal
648 - to be able to zero-fill for calloc. */
649
650 bool
651 region::non_null_p () const
652 {
653 switch (get_kind ())
654 {
655 default:
656 return true;
657 case RK_SYMBOLIC:
658 /* Are we within a symbolic_region? If so, it could be NULL, and we
659 have to fall back on the constraints. */
660 return false;
661 case RK_HEAP_ALLOCATED:
662 return false;
663 }
664 }
665
666 /* Return true iff this region is defined in terms of SVAL. */
667
668 bool
669 region::involves_p (const svalue *sval) const
670 {
671 if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
672 {
673 if (symbolic_reg->get_pointer ()->involves_p (sval))
674 return true;
675 }
676
677 return false;
678 }
679
680 /* Comparator for trees to impose a deterministic ordering on
681 T1 and T2. */
682
static int
tree_cmp (const_tree t1, const_tree t2)
{
  gcc_assert (t1);
  gcc_assert (t2);

  /* Test tree codes first.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return TREE_CODE (t1) - TREE_CODE (t2);

  /* From this point on, we know T1 and T2 have the same tree code.  */

  if (DECL_P (t1))
    {
      if (DECL_NAME (t1) && DECL_NAME (t2))
	return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
		       IDENTIFIER_POINTER (DECL_NAME (t2)));
      else
	{
	  /* Named decls sort before unnamed ones; two unnamed decls
	     fall back on their UIDs for a stable order.  */
	  if (DECL_NAME (t1))
	    return -1;
	  else if (DECL_NAME (t2))
	    return 1;
	  else
	    return DECL_UID (t1) - DECL_UID (t2);
	}
    }

  switch (TREE_CODE (t1))
    {
    case SSA_NAME:
      {
	if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
	  {
	    /* Order by underlying var first, then by version.  */
	    int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
	    if (var_cmp)
	      return var_cmp;
	    return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
	else
	  {
	    /* SSA names with vars sort before anonymous ones.  */
	    if (SSA_NAME_VAR (t1))
	      return -1;
	    else if (SSA_NAME_VAR (t2))
	      return 1;
	    else
	      return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
      }
      break;

    case INTEGER_CST:
      return tree_int_cst_compare (t1, t2);

    case REAL_CST:
      {
	const real_value *rv1 = TREE_REAL_CST_PTR (t1);
	const real_value *rv2 = TREE_REAL_CST_PTR (t2);
	if (real_compare (UNORDERED_EXPR, rv1, rv2))
	  {
	    /* Impose an arbitrary order on NaNs relative to other NaNs
	       and to non-NaNs.  */
	    if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
	      return cmp_isnan;
	    if (int cmp_issignaling_nan
		  = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
	      return cmp_issignaling_nan;
	    return real_isneg (rv1) - real_isneg (rv2);
	  }
	if (real_compare (LT_EXPR, rv1, rv2))
	  return -1;
	if (real_compare (GT_EXPR, rv1, rv2))
	  return 1;
	return 0;
      }

    case STRING_CST:
      return strcmp (TREE_STRING_POINTER (t1),
		     TREE_STRING_POINTER (t2));

    default:
      /* Any other code with equal T1/T2 codes is unexpected here.  */
      gcc_unreachable ();
      break;
    }

  gcc_unreachable ();

  return 0;
}
772
773 /* qsort comparator for trees to impose a deterministic ordering on
774 P1 and P2. */
775
776 int
777 tree_cmp (const void *p1, const void *p2)
778 {
779 const_tree t1 = *(const_tree const *)p1;
780 const_tree t2 = *(const_tree const *)p2;
781
782 return tree_cmp (t1, t2);
783 }
784
785 /* class frame_region : public space_region. */
786
frame_region::~frame_region ()
{
  /* The frame owns the decl_regions for its locals; free them.  */
  for (map_t::iterator iter = m_locals.begin ();
       iter != m_locals.end ();
       ++iter)
    delete (*iter).second;
}
794
void
frame_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the caller's frame, walking up the stack.  */
  if (m_calling_frame)
    m_calling_frame->accept (v);
}
802
803 /* Implementation of region::dump_to_pp vfunc for frame_region. */
804
805 void
806 frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
807 {
808 if (simple)
809 pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ());
810 else
811 pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
812 function_name (m_fun), m_index, get_stack_depth ());
813 }
814
const decl_region *
frame_region::get_region_for_local (region_model_manager *mgr,
				    tree expr) const
{
  // TODO: could also check that VAR_DECLs are locals
  /* This assertion lists the tree codes that
     region::can_have_initial_svalue_p must be able to handle.  */
  gcc_assert (TREE_CODE (expr) == PARM_DECL
	      || TREE_CODE (expr) == VAR_DECL
	      || TREE_CODE (expr) == SSA_NAME
	      || TREE_CODE (expr) == RESULT_DECL);

  /* Ideally we'd use mutable here.  */
  map_t &mutable_locals = const_cast <map_t &> (m_locals);

  /* Lazily create (and cache) a decl_region per local.  */
  if (decl_region **slot = mutable_locals.get (expr))
    return *slot;
  decl_region *reg
    = new decl_region (mgr->alloc_region_id (), this, expr);
  mutable_locals.put (expr, reg);
  return reg;
}
835
836 /* class globals_region : public space_region. */
837
838 /* Implementation of region::dump_to_pp vfunc for globals_region. */
839
840 void
841 globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
842 {
843 if (simple)
844 pp_string (pp, "::");
845 else
846 pp_string (pp, "globals");
847 }
848
849 /* class code_region : public map_region. */
850
851 /* Implementation of region::dump_to_pp vfunc for code_region. */
852
853 void
854 code_region::dump_to_pp (pretty_printer *pp, bool simple) const
855 {
856 if (simple)
857 pp_string (pp, "code region");
858 else
859 pp_string (pp, "code_region()");
860 }
861
862 /* class function_region : public region. */
863
864 /* Implementation of region::dump_to_pp vfunc for function_region. */
865
866 void
867 function_region::dump_to_pp (pretty_printer *pp, bool simple) const
868 {
869 if (simple)
870 {
871 dump_quoted_tree (pp, m_fndecl);
872 }
873 else
874 {
875 pp_string (pp, "function_region(");
876 dump_quoted_tree (pp, m_fndecl);
877 pp_string (pp, ")");
878 }
879 }
880
881 /* class label_region : public region. */
882
883 /* Implementation of region::dump_to_pp vfunc for label_region. */
884
885 void
886 label_region::dump_to_pp (pretty_printer *pp, bool simple) const
887 {
888 if (simple)
889 {
890 dump_quoted_tree (pp, m_label);
891 }
892 else
893 {
894 pp_string (pp, "label_region(");
895 dump_quoted_tree (pp, m_label);
896 pp_string (pp, ")");
897 }
898 }
899
900 /* class stack_region : public region. */
901
902 /* Implementation of region::dump_to_pp vfunc for stack_region. */
903
904 void
905 stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
906 {
907 if (simple)
908 pp_string (pp, "stack region");
909 else
910 pp_string (pp, "stack_region()");
911 }
912
913 /* class heap_region : public region. */
914
915 /* Implementation of region::dump_to_pp vfunc for heap_region. */
916
917 void
918 heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
919 {
920 if (simple)
921 pp_string (pp, "heap region");
922 else
923 pp_string (pp, "heap_region()");
924 }
925
926 /* class root_region : public region. */
927
928 /* root_region's ctor. */
929
root_region::root_region (unsigned id)
: region (complexity (1, 1), id, NULL, NULL_TREE)
{
  /* The root has no parent region and no type.  */
}
934
935 /* Implementation of region::dump_to_pp vfunc for root_region. */
936
937 void
938 root_region::dump_to_pp (pretty_printer *pp, bool simple) const
939 {
940 if (simple)
941 pp_string (pp, "root region");
942 else
943 pp_string (pp, "root_region()");
944 }
945
946 /* class symbolic_region : public map_region. */
947
948 /* symbolic_region's ctor. */
949
symbolic_region::symbolic_region (unsigned id, region *parent,
				  const svalue *sval_ptr)
/* The region's type is the pointed-to type of SVAL_PTR's type.  */
: region (complexity::from_pair (parent, sval_ptr), id, parent,
	  TREE_TYPE (sval_ptr->get_type ())),
  m_sval_ptr (sval_ptr)
{
}
957
958 /* Implementation of region::accept vfunc for symbolic_region. */
959
void
symbolic_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the pointer svalue this region dereferences.  */
  m_sval_ptr->accept (v);
}
966
967 /* Implementation of region::dump_to_pp vfunc for symbolic_region. */
968
969 void
970 symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
971 {
972 if (simple)
973 {
974 pp_string (pp, "(*");
975 m_sval_ptr->dump_to_pp (pp, simple);
976 pp_string (pp, ")");
977 }
978 else
979 {
980 pp_string (pp, "symbolic_region(");
981 get_parent_region ()->dump_to_pp (pp, simple);
982 pp_string (pp, ", ");
983 print_quoted_type (pp, get_type ());
984 pp_string (pp, ", ");
985 m_sval_ptr->dump_to_pp (pp, simple);
986 pp_string (pp, ")");
987 }
988 }
989
990 /* class decl_region : public region. */
991
992 /* Implementation of region::dump_to_pp vfunc for decl_region. */
993
994 void
995 decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
996 {
997 if (simple)
998 pp_printf (pp, "%E", m_decl);
999 else
1000 {
1001 pp_string (pp, "decl_region(");
1002 get_parent_region ()->dump_to_pp (pp, simple);
1003 pp_string (pp, ", ");
1004 print_quoted_type (pp, get_type ());
1005 pp_printf (pp, ", %qE)", m_decl);
1006 }
1007 }
1008
1009 /* Get the stack depth for the frame containing this decl, or 0
1010 for a global. */
1011
1012 int
1013 decl_region::get_stack_depth () const
1014 {
1015 if (get_parent_region () == NULL)
1016 return 0;
1017 if (const frame_region *frame_reg
1018 = get_parent_region ()->dyn_cast_frame_region ())
1019 return frame_reg->get_stack_depth ();
1020 return 0;
1021 }
1022
1023 /* If the underlying decl is in the global constant pool,
1024 return an svalue representing the constant value.
1025 Otherwise return NULL. */
1026
1027 const svalue *
1028 decl_region::maybe_get_constant_value (region_model_manager *mgr) const
1029 {
1030 if (TREE_CODE (m_decl) == VAR_DECL
1031 && DECL_IN_CONSTANT_POOL (m_decl)
1032 && DECL_INITIAL (m_decl)
1033 && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
1034 return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
1035 return NULL;
1036 }
1037
1038 /* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl. */
1039
const svalue *
decl_region::get_svalue_for_constructor (tree ctor,
					 region_model_manager *mgr) const
{
  /* Clobbers mark end-of-life, not values; callers must not pass them.  */
  gcc_assert (!TREE_CLOBBER_P (ctor));

  /* Create a binding map, applying ctor to it, using this
     decl_region as the base region when building child regions
     for offset calculations.  */
  binding_map map;
  if (!map.apply_ctor_to_region (this, ctor, mgr))
    /* Fall back to "unknown" if the ctor couldn't be applied.  */
    return mgr->get_or_create_unknown_svalue (get_type ());

  /* Return a compound svalue for the map we built.  */
  return mgr->get_or_create_compound_svalue (get_type (), map);
}
1056
1057 /* For use on decl_regions for global variables.
1058
1059 Get an svalue for the initial value of this region at entry to
1060 "main" (either based on DECL_INITIAL, or implicit initialization to
1061 zero.
1062
1063 Return NULL if there is a problem. */
1064
const svalue *
decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
{
  tree init = DECL_INITIAL (m_decl);
  if (!init)
    {
      /* If we have an "extern" decl then there may be an initializer in
	 another TU.  */
      if (DECL_EXTERNAL (m_decl))
	return NULL;

      /* Implicit initialization to zero; use a compound_svalue for it.
	 Doing so requires that we have a concrete binding for this region,
	 which can fail if we have a region with unknown size
	 (e.g. "extern const char arr[];").  */
      const binding_key *binding
	= binding_key::make (mgr->get_store_manager (), this);
      if (binding->symbolic_p ())
	return NULL;

      /* Zero-fill a fresh cluster and wrap its map as a compound value.  */
      binding_cluster c (this);
      c.zero_fill_region (mgr->get_store_manager (), this);
      return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
						 c.get_map ());
    }

  /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
     values (to avoid writing out an extra section).  */
  if (init == error_mark_node)
    return NULL;

  if (TREE_CODE (init) == CONSTRUCTOR)
    return get_svalue_for_constructor (init, mgr);

  /* Reuse the get_rvalue logic from region_model.  */
  region_model m (mgr);
  return m.get_rvalue (path_var (init, 0), NULL);
}
1103
1104 /* class field_region : public region. */
1105
1106 /* Implementation of region::dump_to_pp vfunc for field_region. */
1107
1108 void
1109 field_region::dump_to_pp (pretty_printer *pp, bool simple) const
1110 {
1111 if (simple)
1112 {
1113 get_parent_region ()->dump_to_pp (pp, simple);
1114 pp_string (pp, ".");
1115 pp_printf (pp, "%E", m_field);
1116 }
1117 else
1118 {
1119 pp_string (pp, "field_region(");
1120 get_parent_region ()->dump_to_pp (pp, simple);
1121 pp_string (pp, ", ");
1122 print_quoted_type (pp, get_type ());
1123 pp_printf (pp, ", %qE)", m_field);
1124 }
1125 }
1126
1127 /* Implementation of region::get_relative_concrete_offset vfunc
1128 for field_region. */
1129
bool
field_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  /* Compare with e.g. gimple-fold.cc's
     fold_nonarray_ctor_reference.  */
  tree byte_offset = DECL_FIELD_OFFSET (m_field);
  /* Variable-position fields have no concrete offset.  */
  if (TREE_CODE (byte_offset) != INTEGER_CST)
    return false;
  tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
  /* Compute bit offset of the field: the byte offset scaled to bits,
     plus the within-byte bit offset.  */
  offset_int bitoffset
    = (wi::to_offset (field_offset)
       + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
  *out = bitoffset;
  return true;
}
1146
1147 /* class element_region : public region. */
1148
1149 /* Implementation of region::accept vfunc for element_region. */
1150
void
element_region::accept (visitor *v) const
{
  region::accept (v);
  /* Also visit the index svalue.  */
  m_index->accept (v);
}
1157
1158 /* Implementation of region::dump_to_pp vfunc for element_region. */
1159
1160 void
1161 element_region::dump_to_pp (pretty_printer *pp, bool simple) const
1162 {
1163 if (simple)
1164 {
1165 //pp_string (pp, "(");
1166 get_parent_region ()->dump_to_pp (pp, simple);
1167 pp_string (pp, "[");
1168 m_index->dump_to_pp (pp, simple);
1169 pp_string (pp, "]");
1170 //pp_string (pp, ")");
1171 }
1172 else
1173 {
1174 pp_string (pp, "element_region(");
1175 get_parent_region ()->dump_to_pp (pp, simple);
1176 pp_string (pp, ", ");
1177 print_quoted_type (pp, get_type ());
1178 pp_string (pp, ", ");
1179 m_index->dump_to_pp (pp, simple);
1180 pp_printf (pp, ")");
1181 }
1182 }
1183
1184 /* Implementation of region::get_relative_concrete_offset vfunc
1185 for element_region. */
1186
bool
element_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  /* We can only compute a concrete offset for a constant index.  */
  if (tree idx_cst = m_index->maybe_get_constant ())
    {
      gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);

      tree elem_type = get_type ();
      offset_int element_idx = wi::to_offset (idx_cst);

      /* First, use int_size_in_bytes, to reject the case where we
	 have an incomplete type, or a non-constant value.  */
      HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
      if (hwi_byte_size > 0)
	{
	  /* Offset = index * element size (in bits).  */
	  offset_int element_bit_size
	    = hwi_byte_size << LOG2_BITS_PER_UNIT;
	  offset_int element_bit_offset
	    = element_idx * element_bit_size;
	  *out = element_bit_offset;
	  return true;
	}
    }
  return false;
}
1212
1213 /* class offset_region : public region. */
1214
/* Implementation of region::accept vfunc for offset_region.  */

void
offset_region::accept (visitor *v) const
{
  /* Visit via the base implementation first, then additionally visit
     the byte-offset svalue, which is specific to offset_region.  */
  region::accept (v);
  m_byte_offset->accept (v);
}
1223
1224 /* Implementation of region::dump_to_pp vfunc for offset_region. */
1225
1226 void
1227 offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
1228 {
1229 if (simple)
1230 {
1231 //pp_string (pp, "(");
1232 get_parent_region ()->dump_to_pp (pp, simple);
1233 pp_string (pp, "+");
1234 m_byte_offset->dump_to_pp (pp, simple);
1235 //pp_string (pp, ")");
1236 }
1237 else
1238 {
1239 pp_string (pp, "offset_region(");
1240 get_parent_region ()->dump_to_pp (pp, simple);
1241 pp_string (pp, ", ");
1242 print_quoted_type (pp, get_type ());
1243 pp_string (pp, ", ");
1244 m_byte_offset->dump_to_pp (pp, simple);
1245 pp_printf (pp, ")");
1246 }
1247 }
1248
1249 /* Implementation of region::get_relative_concrete_offset vfunc
1250 for offset_region. */
1251
1252 bool
1253 offset_region::get_relative_concrete_offset (bit_offset_t *out) const
1254 {
1255 if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
1256 {
1257 gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1258 /* Use a signed value for the byte offset, to handle
1259 negative offsets. */
1260 HOST_WIDE_INT byte_offset
1261 = wi::to_offset (byte_offset_cst).to_shwi ();
1262 HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
1263 *out = bit_offset;
1264 return true;
1265 }
1266 return false;
1267 }
1268
1269 /* class sized_region : public region. */
1270
/* Implementation of region::accept vfunc for sized_region.  */

void
sized_region::accept (visitor *v) const
{
  /* Visit via the base implementation first, then additionally visit
     the byte-size svalue, which is specific to sized_region.  */
  region::accept (v);
  m_byte_size_sval->accept (v);
}
1279
1280 /* Implementation of region::dump_to_pp vfunc for sized_region. */
1281
1282 void
1283 sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
1284 {
1285 if (simple)
1286 {
1287 pp_string (pp, "SIZED_REG(");
1288 get_parent_region ()->dump_to_pp (pp, simple);
1289 pp_string (pp, ", ");
1290 m_byte_size_sval->dump_to_pp (pp, simple);
1291 pp_string (pp, ")");
1292 }
1293 else
1294 {
1295 pp_string (pp, "sized_region(");
1296 get_parent_region ()->dump_to_pp (pp, simple);
1297 pp_string (pp, ", ");
1298 m_byte_size_sval->dump_to_pp (pp, simple);
1299 pp_printf (pp, ")");
1300 }
1301 }
1302
1303 /* Implementation of region::get_byte_size vfunc for sized_region. */
1304
1305 bool
1306 sized_region::get_byte_size (byte_size_t *out) const
1307 {
1308 if (tree cst = m_byte_size_sval->maybe_get_constant ())
1309 {
1310 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1311 *out = tree_to_uhwi (cst);
1312 return true;
1313 }
1314 return false;
1315 }
1316
1317 /* Implementation of region::get_bit_size vfunc for sized_region. */
1318
1319 bool
1320 sized_region::get_bit_size (bit_size_t *out) const
1321 {
1322 byte_size_t byte_size;
1323 if (!get_byte_size (&byte_size))
1324 return false;
1325 *out = byte_size * BITS_PER_UNIT;
1326 return true;
1327 }
1328
1329 /* class cast_region : public region. */
1330
/* Implementation of region::accept vfunc for cast_region.  */

void
cast_region::accept (visitor *v) const
{
  /* Visit via the base implementation first, then additionally visit
     the original (uncast) region, which is specific to cast_region.  */
  region::accept (v);
  m_original_region->accept (v);
}
1339
1340 /* Implementation of region::dump_to_pp vfunc for cast_region. */
1341
1342 void
1343 cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
1344 {
1345 if (simple)
1346 {
1347 pp_string (pp, "CAST_REG(");
1348 print_quoted_type (pp, get_type ());
1349 pp_string (pp, ", ");
1350 m_original_region->dump_to_pp (pp, simple);
1351 pp_string (pp, ")");
1352 }
1353 else
1354 {
1355 pp_string (pp, "cast_region(");
1356 m_original_region->dump_to_pp (pp, simple);
1357 pp_string (pp, ", ");
1358 print_quoted_type (pp, get_type ());
1359 pp_printf (pp, ")");
1360 }
1361 }
1362
1363 /* class heap_allocated_region : public region. */
1364
1365 /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */
1366
1367 void
1368 heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
1369 {
1370 if (simple)
1371 pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
1372 else
1373 pp_printf (pp, "heap_allocated_region(%i)", get_id ());
1374 }
1375
1376 /* class alloca_region : public region. */
1377
1378 /* Implementation of region::dump_to_pp vfunc for alloca_region. */
1379
1380 void
1381 alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
1382 {
1383 if (simple)
1384 pp_string (pp, "ALLOCA_REGION");
1385 else
1386 pp_string (pp, "alloca_region()");
1387 }
1388
1389 /* class string_region : public region. */
1390
1391 /* Implementation of region::dump_to_pp vfunc for string_region. */
1392
1393 void
1394 string_region::dump_to_pp (pretty_printer *pp, bool simple) const
1395 {
1396 if (simple)
1397 dump_tree (pp, m_string_cst);
1398 else
1399 {
1400 pp_string (pp, "string_region(");
1401 dump_tree (pp, m_string_cst);
1402 if (!flag_dump_noaddr)
1403 {
1404 pp_string (pp, " (");
1405 pp_pointer (pp, m_string_cst);
1406 pp_string (pp, "))");
1407 }
1408 }
1409 }
1410
1411 /* class bit_range_region : public region. */
1412
1413 /* Implementation of region::dump_to_pp vfunc for bit_range_region. */
1414
1415 void
1416 bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
1417 {
1418 if (simple)
1419 {
1420 pp_string (pp, "BIT_RANGE_REG(");
1421 get_parent_region ()->dump_to_pp (pp, simple);
1422 pp_string (pp, ", ");
1423 m_bits.dump_to_pp (pp);
1424 pp_string (pp, ")");
1425 }
1426 else
1427 {
1428 pp_string (pp, "bit_range_region(");
1429 get_parent_region ()->dump_to_pp (pp, simple);
1430 pp_string (pp, ", ");
1431 m_bits.dump_to_pp (pp);
1432 pp_printf (pp, ")");
1433 }
1434 }
1435
1436 /* Implementation of region::get_byte_size vfunc for bit_range_region. */
1437
1438 bool
1439 bit_range_region::get_byte_size (byte_size_t *out) const
1440 {
1441 if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
1442 {
1443 *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
1444 return true;
1445 }
1446 return false;
1447 }
1448
/* Implementation of region::get_bit_size vfunc for bit_range_region.
   The size in bits is known directly from the bit range, so this
   always succeeds.  */

bool
bit_range_region::get_bit_size (bit_size_t *out) const
{
  *out = m_bits.m_size_in_bits;
  return true;
}
1457
1458 /* Implementation of region::get_byte_size_sval vfunc for bit_range_region. */
1459
1460 const svalue *
1461 bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
1462 {
1463 if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
1464 return mgr->get_or_create_unknown_svalue (size_type_node);
1465
1466 HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
1467 return mgr->get_or_create_int_cst (size_type_node, num_bytes);
1468 }
1469
/* Implementation of region::get_relative_concrete_offset vfunc for
   bit_range_region.  The offset within the parent region is the
   start bit of the bit range, which is always concrete, so this
   always succeeds.  */

bool
bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  *out = m_bits.get_start_bit_offset ();
  return true;
}
1479
1480 /* class unknown_region : public region. */
1481
/* Implementation of region::dump_to_pp vfunc for unknown_region.
   There is only one dump form; the "simple" flag is ignored.  */

void
unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
{
  pp_string (pp, "UNKNOWN_REGION");
}
1489
1490 } // namespace ana
1491
1492 #endif /* #if ENABLE_ANALYZER */