1 /* Regions of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "diagnostic-core.h"
26 #include "gimple-pretty-print.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
43 #include "tristate.h"
44 #include "bitmap.h"
45 #include "selftest.h"
46 #include "function.h"
47 #include "json.h"
48 #include "analyzer/analyzer.h"
49 #include "analyzer/analyzer-logging.h"
50 #include "ordered-hash-map.h"
51 #include "options.h"
52 #include "cgraph.h"
53 #include "cfg.h"
54 #include "digraph.h"
55 #include "analyzer/supergraph.h"
56 #include "sbitmap.h"
57 #include "analyzer/call-string.h"
58 #include "analyzer/program-point.h"
59 #include "analyzer/store.h"
60 #include "analyzer/region.h"
61 #include "analyzer/region-model.h"
62 #include "analyzer/sm.h"
63 #include "analyzer/program-state.h"
64
65 #if ENABLE_ANALYZER
66
67 namespace ana {
68
69 /* class region and its various subclasses. */
70
71 /* class region. */
72
73 region::~region ()
74 {
75 delete m_cached_offset;
76 }
77
78 /* Compare REG1 and REG2 by id. */
79
80 int
81 region::cmp_ids (const region *reg1, const region *reg2)
82 {
83 return (long)reg1->get_id () - (long)reg2->get_id ();
84 }
85
86 /* Determine the base region for this region: when considering bindings
87 for this region, the base region is the ancestor which identifies
88 which cluster they should be partitioned into.
89 Regions within the same struct/union/array are in the same cluster.
90 Different decls are in different clusters. */
91
92 const region *
93 region::get_base_region () const
94 {
95 const region *iter = this;
96 while (iter)
97 {
98 switch (iter->get_kind ())
99 {
100 case RK_FIELD:
101 case RK_ELEMENT:
102 case RK_OFFSET:
103 case RK_SIZED:
104 case RK_BIT_RANGE:
105 iter = iter->get_parent_region ();
106 continue;
107 case RK_CAST:
108 iter = iter->dyn_cast_cast_region ()->get_original_region ();
109 continue;
110 default:
111 return iter;
112 }
113 }
114 return iter;
115 }
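
/* For example, given a local "struct coord { int x; int y; } c;", the
   field_region for "c.y" has the decl_region for "c" as its parent;
   get_base_region on that field_region walks up and returns the
   decl_region for "c".  Bindings for "c.x" and "c.y" therefore share one
   cluster, keyed by "c", whereas a different decl gets its own cluster.
   (Illustrative sketch only.)  */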
116
117 /* Return true if get_base_region() == this for this region. */
118
119 bool
120 region::base_region_p () const
121 {
122 switch (get_kind ())
123 {
124 /* Region kinds representing a descendent of a base region. */
125 case RK_FIELD:
126 case RK_ELEMENT:
127 case RK_OFFSET:
128 case RK_SIZED:
129 case RK_CAST:
130 case RK_BIT_RANGE:
131 return false;
132
133 default:
134 return true;
135 }
136 }
137
138 /* Return true if this region is ELDER or one of its descendents. */
139
140 bool
141 region::descendent_of_p (const region *elder) const
142 {
143 const region *iter = this;
144 while (iter)
145 {
146 if (iter == elder)
147 return true;
148 if (iter->get_kind () == RK_CAST)
149 iter = iter->dyn_cast_cast_region ()->get_original_region ();
150 else
151 iter = iter->get_parent_region ();
152 }
153 return false;
154 }
155
156 /* If this region is a frame_region, or a descendent of one, return it.
157 Otherwise return NULL. */
158
159 const frame_region *
160 region::maybe_get_frame_region () const
161 {
162 const region *iter = this;
163 while (iter)
164 {
165 if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
166 return frame_reg;
167 if (iter->get_kind () == RK_CAST)
168 iter = iter->dyn_cast_cast_region ()->get_original_region ();
169 else
170 iter = iter->get_parent_region ();
171 }
172 return NULL;
173 }
174
175 /* Get the memory space of this region. */
176
177 enum memory_space
178 region::get_memory_space () const
179 {
180 const region *iter = this;
181 while (iter)
182 {
183 switch (iter->get_kind ())
184 {
185 default:
186 break;
187 case RK_GLOBALS:
188 return MEMSPACE_GLOBALS;
189 case RK_CODE:
190 case RK_FUNCTION:
191 case RK_LABEL:
192 return MEMSPACE_CODE;
193 case RK_FRAME:
194 case RK_STACK:
195 case RK_ALLOCA:
196 return MEMSPACE_STACK;
197 case RK_HEAP:
198 case RK_HEAP_ALLOCATED:
199 return MEMSPACE_HEAP;
200 case RK_STRING:
201 return MEMSPACE_READONLY_DATA;
202 }
203 if (iter->get_kind () == RK_CAST)
204 iter = iter->dyn_cast_cast_region ()->get_original_region ();
205 else
206 iter = iter->get_parent_region ();
207 }
208 return MEMSPACE_UNKNOWN;
209 }
210
211 /* Subroutine for use by region_model_manager::get_or_create_initial_value.
212 Return true if this region has an initial_svalue.
213 Return false if attempting to use INIT_VAL(this_region) should give
214 the "UNINITIALIZED" poison value. */
215
216 bool
217 region::can_have_initial_svalue_p () const
218 {
219 const region *base_reg = get_base_region ();
220
221 /* Check for memory spaces that are uninitialized by default. */
222 enum memory_space mem_space = base_reg->get_memory_space ();
223 switch (mem_space)
224 {
225 default:
226 gcc_unreachable ();
227 case MEMSPACE_UNKNOWN:
228 case MEMSPACE_CODE:
229 case MEMSPACE_GLOBALS:
230 case MEMSPACE_READONLY_DATA:
231 /* Such regions have initial_svalues. */
232 return true;
233
234 case MEMSPACE_HEAP:
235 /* Heap allocations are uninitialized by default. */
236 return false;
237
238 case MEMSPACE_STACK:
239 if (tree decl = base_reg->maybe_get_decl ())
240 {
241 /* See the assertion in frame_region::get_region_for_local for the
242 tree codes we need to handle here. */
243 switch (TREE_CODE (decl))
244 {
245 default:
246 gcc_unreachable ();
247
248 case PARM_DECL:
249 /* Parameters have initial values. */
250 return true;
251
252 case VAR_DECL:
253 case RESULT_DECL:
254 /* Function locals don't have initial values. */
255 return false;
256
257 case SSA_NAME:
258 {
259 tree ssa_name = decl;
260 /* SSA names that are the default defn of a PARM_DECL
261 have initial_svalues; other SSA names don't. */
262 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
263 && SSA_NAME_VAR (ssa_name)
264 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
265 return true;
266 else
267 return false;
268 }
269 }
270 }
271
272 /* If we have an on-stack region that isn't associated with a decl
273 or SSA name, then we have VLA/alloca, which is uninitialized. */
274 return false;
275 }
276 }
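
/* A few illustrative cases of the above (not exhaustive):

     int g;                -- global: has an initial_svalue.
     void f (int p)
     {
       int local;          -- stack VAR_DECL: no initial_svalue, so reading
                              it yields the "UNINITIALIZED" poison value.
       use (p);            -- PARM_DECL: has an initial_svalue.
     }

   Heap allocations likewise start out uninitialized.  */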
277
278 /* If this region is a decl_region, return the decl.
279 Otherwise return NULL. */
280
281 tree
282 region::maybe_get_decl () const
283 {
284 if (const decl_region *decl_reg = dyn_cast_decl_region ())
285 return decl_reg->get_decl ();
286 return NULL_TREE;
287 }
288
289 /* Get the region_offset for this region (calculating it on the
290 first call and caching it internally). */
291
292 region_offset
293 region::get_offset () const
294 {
295   if (!m_cached_offset)
296 m_cached_offset = new region_offset (calc_offset ());
297 return *m_cached_offset;
298 }
299
300 /* Base class implementation of region::get_byte_size vfunc.
301 If the size of this region (in bytes) is known statically, write it to *OUT
302 and return true.
303 Otherwise return false. */
304
305 bool
306 region::get_byte_size (byte_size_t *out) const
307 {
308 tree type = get_type ();
309
310 /* Bail out e.g. for heap-allocated regions. */
311 if (!type)
312 return false;
313
314 HOST_WIDE_INT bytes = int_size_in_bytes (type);
315 if (bytes == -1)
316 return false;
317 *out = bytes;
318 return true;
319 }
320
321 /* Base implementation of region::get_byte_size_sval vfunc. */
322
323 const svalue *
324 region::get_byte_size_sval (region_model_manager *mgr) const
325 {
326 tree type = get_type ();
327
328 /* Bail out e.g. for heap-allocated regions. */
329 if (!type)
330 return mgr->get_or_create_unknown_svalue (size_type_node);
331
332 HOST_WIDE_INT bytes = int_size_in_bytes (type);
333 if (bytes == -1)
334 return mgr->get_or_create_unknown_svalue (size_type_node);
335
336 tree byte_size = size_in_bytes (type);
337 if (TREE_TYPE (byte_size) != size_type_node)
338 byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
339 return mgr->get_or_create_constant_svalue (byte_size);
340 }
341
342 /* Attempt to get the size of TYPE in bits.
343 If successful, return true and write the size to *OUT.
344 Otherwise return false. */
345
346 bool
347 int_size_in_bits (const_tree type, bit_size_t *out)
348 {
349 if (INTEGRAL_TYPE_P (type))
350 {
351 *out = TYPE_PRECISION (type);
352 return true;
353 }
354
355 tree sz = TYPE_SIZE (type);
356 if (sz && tree_fits_uhwi_p (sz))
357 {
358 *out = TREE_INT_CST_LOW (sz);
359 return true;
360 }
361 else
362 return false;
363 }
364
365 /* If the size of this region (in bits) is known statically, write it to *OUT
366 and return true.
367 Otherwise return false. */
368
369 bool
370 region::get_bit_size (bit_size_t *out) const
371 {
372 tree type = get_type ();
373
374 /* Bail out e.g. for heap-allocated regions. */
375 if (!type)
376 return false;
377
378 return int_size_in_bits (type, out);
379 }
380
381 /* Get the field within RECORD_TYPE at BIT_OFFSET. */
382
383 tree
384 get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
385 {
386 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
387 if (bit_offset < 0)
388 return NULL;
389
390 /* Find the first field that has an offset > BIT_OFFSET,
391 then return the one preceding it.
392 Skip other trees within the chain, such as FUNCTION_DECLs. */
393 tree last_field = NULL_TREE;
394 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
395 iter = DECL_CHAIN (iter))
396 {
397 if (TREE_CODE (iter) == FIELD_DECL)
398 {
399 int iter_field_offset = int_bit_position (iter);
400 if (bit_offset < iter_field_offset)
401 return last_field;
402 last_field = iter;
403 }
404 }
405 return last_field;
406 }
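
/* For example, for "struct s { char c; int i; };" on a typical target
   where "int" is 32 bits aligned to 32 bits, the FIELD_DECLs sit at bit
   positions 0 ("c") and 32 ("i"); looking up bit offset 40 skips past "c",
   never sees a field starting beyond 40, and so returns the last field
   seen, "i".  (Illustrative; the exact layout is target-dependent.)  */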
407
408 /* Populate *OUT with descendent regions of type TYPE that match
409 RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region. */
410
411 void
412 region::get_subregions_for_binding (region_model_manager *mgr,
413 bit_offset_t relative_bit_offset,
414 bit_size_t size_in_bits,
415 tree type,
416 auto_vec <const region *> *out) const
417 {
418 if (get_type () == NULL_TREE || type == NULL_TREE)
419 return;
420 if (relative_bit_offset == 0
421 && types_compatible_p (get_type (), type))
422 {
423 out->safe_push (this);
424 return;
425 }
426 switch (TREE_CODE (get_type ()))
427 {
428 case ARRAY_TYPE:
429 {
430 tree element_type = TREE_TYPE (get_type ());
431 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
432 if (hwi_byte_size > 0)
433 {
434 HOST_WIDE_INT bits_per_element
435 = hwi_byte_size << LOG2_BITS_PER_UNIT;
436 HOST_WIDE_INT element_index
437 = (relative_bit_offset.to_shwi () / bits_per_element);
438 tree element_index_cst
439 = build_int_cst (integer_type_node, element_index);
440 HOST_WIDE_INT inner_bit_offset
441 = relative_bit_offset.to_shwi () % bits_per_element;
442 const region *subregion = mgr->get_element_region
443 (this, element_type,
444 mgr->get_or_create_constant_svalue (element_index_cst));
445 subregion->get_subregions_for_binding (mgr, inner_bit_offset,
446 size_in_bits, type, out);
447 }
448 }
449 break;
450 case RECORD_TYPE:
451 {
452 /* The bit offset might be *within* one of the fields (such as
453 with nested structs).
454 So we want to find the enclosing field, adjust the offset,
455 and repeat. */
456 if (tree field = get_field_at_bit_offset (get_type (),
457 relative_bit_offset))
458 {
459 int field_bit_offset = int_bit_position (field);
460 const region *subregion = mgr->get_field_region (this, field);
461 subregion->get_subregions_for_binding
462 (mgr, relative_bit_offset - field_bit_offset,
463 size_in_bits, type, out);
464 }
465 }
466 break;
467 case UNION_TYPE:
468 {
469 for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
470 field = DECL_CHAIN (field))
471 {
472 if (TREE_CODE (field) != FIELD_DECL)
473 continue;
474 const region *subregion = mgr->get_field_region (this, field);
475 subregion->get_subregions_for_binding (mgr,
476 relative_bit_offset,
477 size_in_bits,
478 type,
479 out);
480 }
481 }
482 break;
483 default:
484 /* Do nothing. */
485 break;
486 }
487 }
488
489 /* Walk from this region up to the base region within its cluster, calculating
490 the offset relative to the base region, either as an offset in bits,
491 or a symbolic offset. */
492
493 region_offset
494 region::calc_offset () const
495 {
496 const region *iter_region = this;
497 bit_offset_t accum_bit_offset = 0;
498
499 while (iter_region)
500 {
501 switch (iter_region->get_kind ())
502 {
503 case RK_FIELD:
504 case RK_ELEMENT:
505 case RK_OFFSET:
506 case RK_BIT_RANGE:
507 {
508 bit_offset_t rel_bit_offset;
509 if (!iter_region->get_relative_concrete_offset (&rel_bit_offset))
510 return region_offset::make_symbolic
511 (iter_region->get_parent_region ());
512 accum_bit_offset += rel_bit_offset;
513 iter_region = iter_region->get_parent_region ();
514 }
515 continue;
516
517 case RK_SIZED:
518 iter_region = iter_region->get_parent_region ();
519 continue;
520
521 case RK_CAST:
522 {
523 const cast_region *cast_reg
524 = as_a <const cast_region *> (iter_region);
525 iter_region = cast_reg->get_original_region ();
526 }
527 continue;
528
529 default:
530 return region_offset::make_concrete (iter_region, accum_bit_offset);
531 }
532 }
533 return region_offset::make_concrete (iter_region, accum_bit_offset);
534 }
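
/* For example, for the element_region "s.arr[2]" where the field "arr"
   starts at bit 64 within the decl_region for "s" and each element is
   32 bits wide, the walk accumulates 2 * 32 == 64 bits for the element
   plus 64 bits for the field, giving a concrete region_offset of 128 bits
   relative to the decl_region for "s".  If any step lacks a concrete
   offset (e.g. a symbolic array index), a symbolic region_offset is
   returned instead.  (Illustrative sketch; the numbers are made up.)  */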
535
536 /* Base implementation of region::get_relative_concrete_offset vfunc. */
537
538 bool
539 region::get_relative_concrete_offset (bit_offset_t *) const
540 {
541 return false;
542 }
543
544 /* Attempt to get the position and size of this region expressed as a
545 concrete range of bytes relative to its parent.
546 If successful, return true and write to *OUT.
547 Otherwise return false. */
548
549 bool
550 region::get_relative_concrete_byte_range (byte_range *out) const
551 {
552 /* We must have a concrete offset relative to the parent. */
553 bit_offset_t rel_bit_offset;
554 if (!get_relative_concrete_offset (&rel_bit_offset))
555 return false;
556 /* ...which must be a whole number of bytes. */
557 if (rel_bit_offset % BITS_PER_UNIT != 0)
558 return false;
559 byte_offset_t start_byte_offset = rel_bit_offset / BITS_PER_UNIT;
560
561 /* We must have a concrete size, which must be a whole number
562 of bytes. */
563 byte_size_t num_bytes;
564 if (!get_byte_size (&num_bytes))
565 return false;
566
567 /* Success. */
568 *out = byte_range (start_byte_offset, num_bytes);
569 return true;
570 }
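
/* For example, a subregion at a concrete relative offset of 16 bits with a
   known size of 4 bytes yields byte_range (2, 4); an offset that isn't a
   whole number of bytes, or an unknown size, makes this return false.
   (Illustrative only.)  */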
571
572 /* Dump a description of this region to stderr. */
573
574 DEBUG_FUNCTION void
575 region::dump (bool simple) const
576 {
577 pretty_printer pp;
578 pp_format_decoder (&pp) = default_tree_printer;
579 pp_show_color (&pp) = pp_show_color (global_dc->printer);
580 pp.buffer->stream = stderr;
581 dump_to_pp (&pp, simple);
582 pp_newline (&pp);
583 pp_flush (&pp);
584 }
585
586 /* Return a new json::string describing the region. */
587
588 json::value *
589 region::to_json () const
590 {
591 label_text desc = get_desc (true);
592 json::value *reg_js = new json::string (desc.get ());
593 return reg_js;
594 }
595
596 /* Generate a description of this region. */
597
598 DEBUG_FUNCTION label_text
599 region::get_desc (bool simple) const
600 {
601 pretty_printer pp;
602 pp_format_decoder (&pp) = default_tree_printer;
603 dump_to_pp (&pp, simple);
604 return label_text::take (xstrdup (pp_formatted_text (&pp)));
605 }
606
607 /* Base implementation of region::accept vfunc.
608 Subclass implementations should chain up to this. */
609
610 void
611 region::accept (visitor *v) const
612 {
613 v->visit_region (this);
614 if (m_parent)
615 m_parent->accept (v);
616 }
617
618 /* Return true if this is a symbolic region for dereferencing an
619 unknown ptr.
620 We shouldn't attempt to bind values for this region (but
621 can unbind values for other regions). */
622
623 bool
624 region::symbolic_for_unknown_ptr_p () const
625 {
626 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
627 if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
628 return true;
629 return false;
630 }
631
632 /* Return true if this is a region for a decl with name DECL_NAME.
633 Intended for use when debugging (for assertions and conditional
634 breakpoints). */
635
636 DEBUG_FUNCTION bool
637 region::is_named_decl_p (const char *decl_name) const
638 {
639 if (tree decl = maybe_get_decl ())
640 if (DECL_NAME (decl)
641 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
642 return true;
643 return false;
644 }
645
646 /* region's ctor. */
647
648 region::region (complexity c, unsigned id, const region *parent, tree type)
649 : m_complexity (c), m_id (id), m_parent (parent), m_type (type),
650 m_cached_offset (NULL)
651 {
652 gcc_assert (type == NULL_TREE || TYPE_P (type));
653 }
654
655 /* Comparator for use by vec<const region *>::qsort,
656 using their IDs to order them. */
657
658 int
659 region::cmp_ptr_ptr (const void *p1, const void *p2)
660 {
661 const region * const *reg1 = (const region * const *)p1;
662 const region * const *reg2 = (const region * const *)p2;
663
664 return cmp_ids (*reg1, *reg2);
665 }
666
667 /* Determine if a pointer to this region must be non-NULL.
668
669 Generally, pointers to regions must be non-NULL, but pointers
670 to symbolic_regions might, in fact, be NULL.
671
672 This allows us to simulate functions like malloc and calloc with:
673 - only one "outcome" from each statement,
674 - the idea that the pointer is on the heap if non-NULL
675 - the possibility that the pointer could be NULL
676 - the idea that successive values returned from malloc are non-equal
677 - to be able to zero-fill for calloc. */
678
679 bool
680 region::non_null_p () const
681 {
682 switch (get_kind ())
683 {
684 default:
685 return true;
686 case RK_SYMBOLIC:
687 /* Are we within a symbolic_region? If so, it could be NULL, and we
688 have to fall back on the constraints. */
689 return false;
690 case RK_HEAP_ALLOCATED:
691 return false;
692 }
693 }
694
695 /* Return true iff this region is defined in terms of SVAL. */
696
697 bool
698 region::involves_p (const svalue *sval) const
699 {
700 if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
701 {
702 if (symbolic_reg->get_pointer ()->involves_p (sval))
703 return true;
704 }
705
706 return false;
707 }
708
709 /* Comparator for trees to impose a deterministic ordering on
710 T1 and T2. */
711
712 static int
713 tree_cmp (const_tree t1, const_tree t2)
714 {
715 gcc_assert (t1);
716 gcc_assert (t2);
717
718 /* Test tree codes first. */
719 if (TREE_CODE (t1) != TREE_CODE (t2))
720 return TREE_CODE (t1) - TREE_CODE (t2);
721
722 /* From this point on, we know T1 and T2 have the same tree code. */
723
724 if (DECL_P (t1))
725 {
726 if (DECL_NAME (t1) && DECL_NAME (t2))
727 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
728 IDENTIFIER_POINTER (DECL_NAME (t2)));
729 else
730 {
731 if (DECL_NAME (t1))
732 return -1;
733 else if (DECL_NAME (t2))
734 return 1;
735 else
736 return DECL_UID (t1) - DECL_UID (t2);
737 }
738 }
739
740 switch (TREE_CODE (t1))
741 {
742 case SSA_NAME:
743 {
744 if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
745 {
746 int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
747 if (var_cmp)
748 return var_cmp;
749 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
750 }
751 else
752 {
753 if (SSA_NAME_VAR (t1))
754 return -1;
755 else if (SSA_NAME_VAR (t2))
756 return 1;
757 else
758 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
759 }
760 }
761 break;
762
763 case INTEGER_CST:
764 return tree_int_cst_compare (t1, t2);
765
766 case REAL_CST:
767 {
768 const real_value *rv1 = TREE_REAL_CST_PTR (t1);
769 const real_value *rv2 = TREE_REAL_CST_PTR (t2);
770 if (real_compare (UNORDERED_EXPR, rv1, rv2))
771 {
772 /* Impose an arbitrary order on NaNs relative to other NaNs
773 and to non-NaNs. */
774 if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
775 return cmp_isnan;
776 if (int cmp_issignaling_nan
777 = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
778 return cmp_issignaling_nan;
779 return real_isneg (rv1) - real_isneg (rv2);
780 }
781 if (real_compare (LT_EXPR, rv1, rv2))
782 return -1;
783 if (real_compare (GT_EXPR, rv1, rv2))
784 return 1;
785 return 0;
786 }
787
788 case STRING_CST:
789 return strcmp (TREE_STRING_POINTER (t1),
790 TREE_STRING_POINTER (t2));
791
792 default:
793 gcc_unreachable ();
794 break;
795 }
796
797 gcc_unreachable ();
798
799 return 0;
800 }
801
802 /* qsort comparator for trees to impose a deterministic ordering on
803 P1 and P2. */
804
805 int
806 tree_cmp (const void *p1, const void *p2)
807 {
808 const_tree t1 = *(const_tree const *)p1;
809 const_tree t2 = *(const_tree const *)p2;
810
811 return tree_cmp (t1, t2);
812 }
813
814 /* class frame_region : public space_region. */
815
816 frame_region::~frame_region ()
817 {
818 for (map_t::iterator iter = m_locals.begin ();
819 iter != m_locals.end ();
820 ++iter)
821 delete (*iter).second;
822 }
823
824 void
825 frame_region::accept (visitor *v) const
826 {
827 region::accept (v);
828 if (m_calling_frame)
829 m_calling_frame->accept (v);
830 }
831
832 /* Implementation of region::dump_to_pp vfunc for frame_region. */
833
834 void
835 frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
836 {
837 if (simple)
838 pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ());
839 else
840 pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
841 function_name (m_fun), m_index, get_stack_depth ());
842 }
843
844 const decl_region *
845 frame_region::get_region_for_local (region_model_manager *mgr,
846 tree expr,
847 const region_model_context *ctxt) const
848 {
849 if (CHECKING_P)
850 {
851 /* Verify that EXPR is a local or SSA name, and that it's for the
852 correct function for this stack frame. */
853 gcc_assert (TREE_CODE (expr) == PARM_DECL
854 || TREE_CODE (expr) == VAR_DECL
855 || TREE_CODE (expr) == SSA_NAME
856 || TREE_CODE (expr) == RESULT_DECL);
857 switch (TREE_CODE (expr))
858 {
859 default:
860 gcc_unreachable ();
861 case VAR_DECL:
862 gcc_assert (!is_global_var (expr));
863 /* Fall through. */
864 case PARM_DECL:
865 case RESULT_DECL:
866 gcc_assert (DECL_CONTEXT (expr) == m_fun->decl);
867 break;
868 case SSA_NAME:
869 {
870 if (tree var = SSA_NAME_VAR (expr))
871 {
872 if (DECL_P (var))
873 gcc_assert (DECL_CONTEXT (var) == m_fun->decl);
874 }
875 else if (ctxt)
876 if (const extrinsic_state *ext_state = ctxt->get_ext_state ())
877 if (const supergraph *sg
878 = ext_state->get_engine ()->get_supergraph ())
879 {
880 const gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
881 const supernode *snode
882 = sg->get_supernode_for_stmt (def_stmt);
883 gcc_assert (snode->get_function () == m_fun);
884 }
885 }
886 break;
887 }
888 }
889
890 /* Ideally we'd use mutable here. */
891 map_t &mutable_locals = const_cast <map_t &> (m_locals);
892
893 if (decl_region **slot = mutable_locals.get (expr))
894 return *slot;
895 decl_region *reg
896 = new decl_region (mgr->alloc_region_id (), this, expr);
897 mutable_locals.put (expr, reg);
898 return reg;
899 }
900
901 /* class globals_region : public space_region. */
902
903 /* Implementation of region::dump_to_pp vfunc for globals_region. */
904
905 void
906 globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
907 {
908 if (simple)
909 pp_string (pp, "::");
910 else
911 pp_string (pp, "globals");
912 }
913
914 /* class code_region : public map_region. */
915
916 /* Implementation of region::dump_to_pp vfunc for code_region. */
917
918 void
919 code_region::dump_to_pp (pretty_printer *pp, bool simple) const
920 {
921 if (simple)
922 pp_string (pp, "code region");
923 else
924 pp_string (pp, "code_region()");
925 }
926
927 /* class function_region : public region. */
928
929 /* Implementation of region::dump_to_pp vfunc for function_region. */
930
931 void
932 function_region::dump_to_pp (pretty_printer *pp, bool simple) const
933 {
934 if (simple)
935 {
936 dump_quoted_tree (pp, m_fndecl);
937 }
938 else
939 {
940 pp_string (pp, "function_region(");
941 dump_quoted_tree (pp, m_fndecl);
942 pp_string (pp, ")");
943 }
944 }
945
946 /* class label_region : public region. */
947
948 /* Implementation of region::dump_to_pp vfunc for label_region. */
949
950 void
951 label_region::dump_to_pp (pretty_printer *pp, bool simple) const
952 {
953 if (simple)
954 {
955 dump_quoted_tree (pp, m_label);
956 }
957 else
958 {
959 pp_string (pp, "label_region(");
960 dump_quoted_tree (pp, m_label);
961 pp_string (pp, ")");
962 }
963 }
964
965 /* class stack_region : public region. */
966
967 /* Implementation of region::dump_to_pp vfunc for stack_region. */
968
969 void
970 stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
971 {
972 if (simple)
973 pp_string (pp, "stack region");
974 else
975 pp_string (pp, "stack_region()");
976 }
977
978 /* class heap_region : public region. */
979
980 /* Implementation of region::dump_to_pp vfunc for heap_region. */
981
982 void
983 heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
984 {
985 if (simple)
986 pp_string (pp, "heap region");
987 else
988 pp_string (pp, "heap_region()");
989 }
990
991 /* class root_region : public region. */
992
993 /* root_region's ctor. */
994
995 root_region::root_region (unsigned id)
996 : region (complexity (1, 1), id, NULL, NULL_TREE)
997 {
998 }
999
1000 /* Implementation of region::dump_to_pp vfunc for root_region. */
1001
1002 void
1003 root_region::dump_to_pp (pretty_printer *pp, bool simple) const
1004 {
1005 if (simple)
1006 pp_string (pp, "root region");
1007 else
1008 pp_string (pp, "root_region()");
1009 }
1010
1011 /* class symbolic_region : public map_region. */
1012
1013 /* symbolic_region's ctor. */
1014
1015 symbolic_region::symbolic_region (unsigned id, region *parent,
1016 const svalue *sval_ptr)
1017 : region (complexity::from_pair (parent, sval_ptr), id, parent,
1018 (sval_ptr->get_type ()
1019 ? TREE_TYPE (sval_ptr->get_type ())
1020 : NULL_TREE)),
1021 m_sval_ptr (sval_ptr)
1022 {
1023 }
1024
1025 /* Implementation of region::accept vfunc for symbolic_region. */
1026
1027 void
1028 symbolic_region::accept (visitor *v) const
1029 {
1030 region::accept (v);
1031 m_sval_ptr->accept (v);
1032 }
1033
1034 /* Implementation of region::dump_to_pp vfunc for symbolic_region. */
1035
1036 void
1037 symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
1038 {
1039 if (simple)
1040 {
1041 pp_string (pp, "(*");
1042 m_sval_ptr->dump_to_pp (pp, simple);
1043 pp_string (pp, ")");
1044 }
1045 else
1046 {
1047 pp_string (pp, "symbolic_region(");
1048 get_parent_region ()->dump_to_pp (pp, simple);
1049 if (get_type ())
1050 {
1051 pp_string (pp, ", ");
1052 print_quoted_type (pp, get_type ());
1053 }
1054 pp_string (pp, ", ");
1055 m_sval_ptr->dump_to_pp (pp, simple);
1056 pp_string (pp, ")");
1057 }
1058 }
1059
1060 /* class decl_region : public region. */
1061
1062 /* Implementation of region::dump_to_pp vfunc for decl_region. */
1063
1064 void
1065 decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
1066 {
1067 if (simple)
1068 pp_printf (pp, "%E", m_decl);
1069 else
1070 {
1071 pp_string (pp, "decl_region(");
1072 get_parent_region ()->dump_to_pp (pp, simple);
1073 pp_string (pp, ", ");
1074 print_quoted_type (pp, get_type ());
1075 pp_printf (pp, ", %qE)", m_decl);
1076 }
1077 }
1078
1079 /* Get the stack depth for the frame containing this decl, or 0
1080 for a global. */
1081
1082 int
1083 decl_region::get_stack_depth () const
1084 {
1085 if (get_parent_region () == NULL)
1086 return 0;
1087 if (const frame_region *frame_reg
1088 = get_parent_region ()->dyn_cast_frame_region ())
1089 return frame_reg->get_stack_depth ();
1090 return 0;
1091 }
1092
1093 /* If the underlying decl is in the global constant pool,
1094 return an svalue representing the constant value.
1095 Otherwise return NULL. */
1096
1097 const svalue *
1098 decl_region::maybe_get_constant_value (region_model_manager *mgr) const
1099 {
1100 if (TREE_CODE (m_decl) == VAR_DECL
1101 && DECL_IN_CONSTANT_POOL (m_decl)
1102 && DECL_INITIAL (m_decl)
1103 && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
1104 return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
1105 return NULL;
1106 }
1107
1108 /* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl. */
1109
1110 const svalue *
1111 decl_region::get_svalue_for_constructor (tree ctor,
1112 region_model_manager *mgr) const
1113 {
1114 gcc_assert (!TREE_CLOBBER_P (ctor));
1115
1116 /* Create a binding map, applying ctor to it, using this
1117 decl_region as the base region when building child regions
1118 for offset calculations. */
1119 binding_map map;
1120 if (!map.apply_ctor_to_region (this, ctor, mgr))
1121 return mgr->get_or_create_unknown_svalue (get_type ());
1122
1123 /* Return a compound svalue for the map we built. */
1124 return mgr->get_or_create_compound_svalue (get_type (), map);
1125 }
1126
1127 /* For use on decl_regions for global variables.
1128
1129 Get an svalue for the initial value of this region at entry to
1130 "main" (either based on DECL_INITIAL, or implicit initialization to
1131    zero).
1132
1133 Return NULL if there is a problem. */
1134
1135 const svalue *
1136 decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
1137 {
1138 tree init = DECL_INITIAL (m_decl);
1139 if (!init)
1140 {
1141 /* If we have an "extern" decl then there may be an initializer in
1142 another TU. */
1143 if (DECL_EXTERNAL (m_decl))
1144 return NULL;
1145
1146 /* Implicit initialization to zero; use a compound_svalue for it.
1147 Doing so requires that we have a concrete binding for this region,
1148 which can fail if we have a region with unknown size
1149 (e.g. "extern const char arr[];"). */
1150 const binding_key *binding
1151 = binding_key::make (mgr->get_store_manager (), this);
1152 if (binding->symbolic_p ())
1153 return NULL;
1154
1155 /* If we don't care about tracking the content of this region, then
1156 it's unused, and the value doesn't matter. */
1157 if (!tracked_p ())
1158 return NULL;
1159
1160 binding_cluster c (this);
1161 c.zero_fill_region (mgr->get_store_manager (), this);
1162 return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
1163 c.get_map ());
1164 }
1165
1166 /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
1167 values (to avoid writing out an extra section). */
1168 if (init == error_mark_node)
1169 return NULL;
1170
1171 if (TREE_CODE (init) == CONSTRUCTOR)
1172 return get_svalue_for_constructor (init, mgr);
1173
1174 /* Reuse the get_rvalue logic from region_model. */
1175 region_model m (mgr);
1176 return m.get_rvalue (path_var (init, 0), NULL);
1177 }
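
/* Illustrative cases of the above for globals (not exhaustive):

     int g1 = 42;    -- DECL_INITIAL is the constant 42; handled via
                        region_model::get_rvalue.
     int g2;         -- no DECL_INITIAL: treated as implicitly zero,
                        expressed as a zero-filled compound_svalue.
     extern int g3;  -- DECL_EXTERNAL: NULL is returned, since the
                        initializer may live in another TU.  */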
1178
1179 /* Subroutine of symnode_requires_tracking_p; return true if REF
1180 might imply that we should be tracking the value of its decl. */
1181
1182 static bool
1183 ipa_ref_requires_tracking (ipa_ref *ref)
1184 {
1185 /* If we have a load/store/alias of the symbol, then we'll track
1186 the decl's value. */
1187 if (ref->use != IPA_REF_ADDR)
1188 return true;
1189
1190 if (ref->stmt == NULL)
1191 return true;
1192
1193 switch (ref->stmt->code)
1194 {
1195 default:
1196 return true;
1197 case GIMPLE_CALL:
1198 {
1199 cgraph_node *caller_cnode = dyn_cast <cgraph_node *> (ref->referring);
1200 if (caller_cnode == NULL)
1201 return true;
1202 cgraph_edge *edge = caller_cnode->get_edge (ref->stmt);
1203 if (!edge)
1204 return true;
1205 if (edge->callee == NULL)
1206 return true; /* e.g. call through function ptr. */
1207 if (edge->callee->definition)
1208 return true;
1209 /* If we get here, then this ref is a pointer passed to
1210 a function we don't have the definition for. */
1211 return false;
1212 }
1213 break;
1214 case GIMPLE_ASM:
1215 {
1216 const gasm *asm_stmt = as_a <const gasm *> (ref->stmt);
1217 if (gimple_asm_noutputs (asm_stmt) > 0)
1218 return true;
1219 if (gimple_asm_nclobbers (asm_stmt) > 0)
1220 return true;
1221 /* If we get here, then this ref is the decl being passed
1222 by pointer to asm with no outputs. */
1223 return false;
1224 }
1225 break;
1226 }
1227 }
1228
1229 /* Determine if the decl for SYMNODE should have binding_clusters
1230    in our state objects; return false to avoid tracking certain
1231    decls, as an optimization. */
1232
1233 static bool
1234 symnode_requires_tracking_p (symtab_node *symnode)
1235 {
1236 gcc_assert (symnode);
1237 if (symnode->externally_visible)
1238 return true;
1239 tree context_fndecl = DECL_CONTEXT (symnode->decl);
1240 if (context_fndecl == NULL)
1241 return true;
1242 if (TREE_CODE (context_fndecl) != FUNCTION_DECL)
1243 return true;
1244 for (auto ref : symnode->ref_list.referring)
1245 if (ipa_ref_requires_tracking (ref))
1246 return true;
1247
1248 /* If we get here, then we don't have uses of this decl that require
1249 tracking; we never read from it or write to it explicitly. */
1250 return false;
1251 }
1252
1253 /* Subroutine of decl_region ctor: determine whether this decl_region
1254    can have binding_clusters; return false to avoid tracking certain
1255    decls in our state objects, as an optimization. */
1256
1257 bool
1258 decl_region::calc_tracked_p (tree decl)
1259 {
1260 /* Precondition of symtab_node::get. */
1261 if (TREE_CODE (decl) == VAR_DECL
1262 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) || in_lto_p))
1263 if (symtab_node *symnode = symtab_node::get (decl))
1264 return symnode_requires_tracking_p (symnode);
1265 return true;
1266 }
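
/* For example, a function-local "static char buf[80];" whose only use in
   this TU is having its address passed to a call with no available
   definition (say "ext_fill (buf);") is never explicitly read or written
   here, so symnode_requires_tracking_p returns false and the decl_region
   is left untracked, avoiding a binding_cluster for it.
   (Illustrative example; "ext_fill" is hypothetical.)  */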
1267
1268 /* class field_region : public region. */
1269
1270 /* Implementation of region::dump_to_pp vfunc for field_region. */
1271
1272 void
1273 field_region::dump_to_pp (pretty_printer *pp, bool simple) const
1274 {
1275 if (simple)
1276 {
1277 get_parent_region ()->dump_to_pp (pp, simple);
1278 pp_string (pp, ".");
1279 pp_printf (pp, "%E", m_field);
1280 }
1281 else
1282 {
1283 pp_string (pp, "field_region(");
1284 get_parent_region ()->dump_to_pp (pp, simple);
1285 pp_string (pp, ", ");
1286 print_quoted_type (pp, get_type ());
1287 pp_printf (pp, ", %qE)", m_field);
1288 }
1289 }
1290
1291 /* Implementation of region::get_relative_concrete_offset vfunc
1292 for field_region. */
1293
1294 bool
1295 field_region::get_relative_concrete_offset (bit_offset_t *out) const
1296 {
1297 /* Compare with e.g. gimple-fold.cc's
1298 fold_nonarray_ctor_reference. */
1299 tree byte_offset = DECL_FIELD_OFFSET (m_field);
1300 if (TREE_CODE (byte_offset) != INTEGER_CST)
1301 return false;
1302 tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
1303 /* Compute bit offset of the field. */
1304 offset_int bitoffset
1305 = (wi::to_offset (field_offset)
1306 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
1307 *out = bitoffset;
1308 return true;
1309 }
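
/* For example, if DECL_FIELD_OFFSET (m_field) is 4 bytes and
   DECL_FIELD_BIT_OFFSET (m_field) is 3, the field starts at bit
   (4 << LOG2_BITS_PER_UNIT) + 3 == 35 on a target with 8-bit units.
   (Illustrative arithmetic only.)  */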
1310
1311 /* class element_region : public region. */
1312
1313 /* Implementation of region::accept vfunc for element_region. */
1314
1315 void
1316 element_region::accept (visitor *v) const
1317 {
1318 region::accept (v);
1319 m_index->accept (v);
1320 }
1321
1322 /* Implementation of region::dump_to_pp vfunc for element_region. */
1323
1324 void
1325 element_region::dump_to_pp (pretty_printer *pp, bool simple) const
1326 {
1327 if (simple)
1328 {
1329 //pp_string (pp, "(");
1330 get_parent_region ()->dump_to_pp (pp, simple);
1331 pp_string (pp, "[");
1332 m_index->dump_to_pp (pp, simple);
1333 pp_string (pp, "]");
1334 //pp_string (pp, ")");
1335 }
1336 else
1337 {
1338 pp_string (pp, "element_region(");
1339 get_parent_region ()->dump_to_pp (pp, simple);
1340 pp_string (pp, ", ");
1341 print_quoted_type (pp, get_type ());
1342 pp_string (pp, ", ");
1343 m_index->dump_to_pp (pp, simple);
1344 pp_printf (pp, ")");
1345 }
1346 }
1347
1348 /* Implementation of region::get_relative_concrete_offset vfunc
1349 for element_region. */
1350
1351 bool
1352 element_region::get_relative_concrete_offset (bit_offset_t *out) const
1353 {
1354 if (tree idx_cst = m_index->maybe_get_constant ())
1355 {
1356 gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);
1357
1358 tree elem_type = get_type ();
1359 offset_int element_idx = wi::to_offset (idx_cst);
1360
1361 /* First, use int_size_in_bytes, to reject the case where we
1362 have an incomplete type, or a non-constant value. */
1363 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1364 if (hwi_byte_size > 0)
1365 {
1366 offset_int element_bit_size
1367 = hwi_byte_size << LOG2_BITS_PER_UNIT;
1368 offset_int element_bit_offset
1369 = element_idx * element_bit_size;
1370 *out = element_bit_offset;
1371 return true;
1372 }
1373 }
1374 return false;
1375 }
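
/* For example, for elements of a 4-byte "int" type and a constant index
   of 3, element_bit_size is 4 << LOG2_BITS_PER_UNIT == 32 and the value
   written to *OUT is 3 * 32 == 96 bits.  A symbolic index, or an
   incomplete element type, makes this return false, and calc_offset then
   falls back to a symbolic offset.  (Illustrative; sizes are
   target-dependent.)  */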
1376
1377 /* class offset_region : public region. */
1378
1379 /* Implementation of region::accept vfunc for offset_region. */
1380
1381 void
1382 offset_region::accept (visitor *v) const
1383 {
1384 region::accept (v);
1385 m_byte_offset->accept (v);
1386 }
1387
1388 /* Implementation of region::dump_to_pp vfunc for offset_region. */
1389
1390 void
1391 offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
1392 {
1393 if (simple)
1394 {
1395 //pp_string (pp, "(");
1396 get_parent_region ()->dump_to_pp (pp, simple);
1397 pp_string (pp, "+");
1398 m_byte_offset->dump_to_pp (pp, simple);
1399 //pp_string (pp, ")");
1400 }
1401 else
1402 {
1403 pp_string (pp, "offset_region(");
1404 get_parent_region ()->dump_to_pp (pp, simple);
1405 pp_string (pp, ", ");
1406 print_quoted_type (pp, get_type ());
1407 pp_string (pp, ", ");
1408 m_byte_offset->dump_to_pp (pp, simple);
1409 pp_printf (pp, ")");
1410 }
1411 }
1412
1413 /* Implementation of region::get_relative_concrete_offset vfunc
1414 for offset_region. */
1415
1416 bool
1417 offset_region::get_relative_concrete_offset (bit_offset_t *out) const
1418 {
1419 if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
1420 {
1421 gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1422 /* Use a signed value for the byte offset, to handle
1423 negative offsets. */
1424 HOST_WIDE_INT byte_offset
1425 = wi::to_offset (byte_offset_cst).to_shwi ();
1426 HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
1427 *out = bit_offset;
1428 return true;
1429 }
1430 return false;
1431 }
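
/* For example, a constant byte offset of 4 yields 4 * BITS_PER_UNIT == 32
   bits on an 8-bit-unit target, and a negative byte offset of -2 yields
   -16; a symbolic byte offset makes this return false.
   (Illustrative arithmetic only.)  */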
1432
1433 /* class sized_region : public region. */
1434
1435 /* Implementation of region::accept vfunc for sized_region. */
1436
1437 void
1438 sized_region::accept (visitor *v) const
1439 {
1440 region::accept (v);
1441 m_byte_size_sval->accept (v);
1442 }
1443
1444 /* Implementation of region::dump_to_pp vfunc for sized_region. */
1445
1446 void
1447 sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
1448 {
1449 if (simple)
1450 {
1451 pp_string (pp, "SIZED_REG(");
1452 get_parent_region ()->dump_to_pp (pp, simple);
1453 pp_string (pp, ", ");
1454 m_byte_size_sval->dump_to_pp (pp, simple);
1455 pp_string (pp, ")");
1456 }
1457 else
1458 {
1459 pp_string (pp, "sized_region(");
1460 get_parent_region ()->dump_to_pp (pp, simple);
1461 pp_string (pp, ", ");
1462 m_byte_size_sval->dump_to_pp (pp, simple);
1463 pp_printf (pp, ")");
1464 }
1465 }
1466
1467 /* Implementation of region::get_byte_size vfunc for sized_region. */
1468
1469 bool
1470 sized_region::get_byte_size (byte_size_t *out) const
1471 {
1472 if (tree cst = m_byte_size_sval->maybe_get_constant ())
1473 {
1474 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1475 *out = tree_to_uhwi (cst);
1476 return true;
1477 }
1478 return false;
1479 }
1480
1481 /* Implementation of region::get_bit_size vfunc for sized_region. */
1482
1483 bool
1484 sized_region::get_bit_size (bit_size_t *out) const
1485 {
1486 byte_size_t byte_size;
1487 if (!get_byte_size (&byte_size))
1488 return false;
1489 *out = byte_size * BITS_PER_UNIT;
1490 return true;
1491 }
1492
1493 /* class cast_region : public region. */
1494
1495 /* Implementation of region::accept vfunc for cast_region. */
1496
1497 void
1498 cast_region::accept (visitor *v) const
1499 {
1500 region::accept (v);
1501 m_original_region->accept (v);
1502 }
1503
1504 /* Implementation of region::dump_to_pp vfunc for cast_region. */
1505
1506 void
1507 cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
1508 {
1509 if (simple)
1510 {
1511 pp_string (pp, "CAST_REG(");
1512 print_quoted_type (pp, get_type ());
1513 pp_string (pp, ", ");
1514 m_original_region->dump_to_pp (pp, simple);
1515 pp_string (pp, ")");
1516 }
1517 else
1518 {
1519 pp_string (pp, "cast_region(");
1520 m_original_region->dump_to_pp (pp, simple);
1521 pp_string (pp, ", ");
1522 print_quoted_type (pp, get_type ());
1523 pp_printf (pp, ")");
1524 }
1525 }
1526
1527 /* class heap_allocated_region : public region. */
1528
1529 /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */
1530
1531 void
1532 heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
1533 {
1534 if (simple)
1535 pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
1536 else
1537 pp_printf (pp, "heap_allocated_region(%i)", get_id ());
1538 }
1539
1540 /* class alloca_region : public region. */
1541
1542 /* Implementation of region::dump_to_pp vfunc for alloca_region. */
1543
1544 void
1545 alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
1546 {
1547 if (simple)
1548 pp_printf (pp, "ALLOCA_REGION(%i)", get_id ());
1549 else
1550 pp_printf (pp, "alloca_region(%i)", get_id ());
1551 }
1552
1553 /* class string_region : public region. */
1554
1555 /* Implementation of region::dump_to_pp vfunc for string_region. */
1556
1557 void
1558 string_region::dump_to_pp (pretty_printer *pp, bool simple) const
1559 {
1560 if (simple)
1561 dump_tree (pp, m_string_cst);
1562 else
1563 {
1564 pp_string (pp, "string_region(");
1565 dump_tree (pp, m_string_cst);
1566 if (!flag_dump_noaddr)
1567 {
1568 pp_string (pp, " (");
1569 pp_pointer (pp, m_string_cst);
1570 pp_string (pp, "))");
1571 }
1572 }
1573 }
1574
1575 /* class bit_range_region : public region. */
1576
1577 /* Implementation of region::dump_to_pp vfunc for bit_range_region. */
1578
1579 void
1580 bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
1581 {
1582 if (simple)
1583 {
1584 pp_string (pp, "BIT_RANGE_REG(");
1585 get_parent_region ()->dump_to_pp (pp, simple);
1586 pp_string (pp, ", ");
1587 m_bits.dump_to_pp (pp);
1588 pp_string (pp, ")");
1589 }
1590 else
1591 {
1592 pp_string (pp, "bit_range_region(");
1593 get_parent_region ()->dump_to_pp (pp, simple);
1594 pp_string (pp, ", ");
1595 m_bits.dump_to_pp (pp);
1596 pp_printf (pp, ")");
1597 }
1598 }
1599
1600 /* Implementation of region::get_byte_size vfunc for bit_range_region. */
1601
1602 bool
1603 bit_range_region::get_byte_size (byte_size_t *out) const
1604 {
1605 if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
1606 {
1607 *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
1608 return true;
1609 }
1610 return false;
1611 }
1612
1613 /* Implementation of region::get_bit_size vfunc for bit_range_region. */
1614
1615 bool
1616 bit_range_region::get_bit_size (bit_size_t *out) const
1617 {
1618 *out = m_bits.m_size_in_bits;
1619 return true;
1620 }
1621
1622 /* Implementation of region::get_byte_size_sval vfunc for bit_range_region. */
1623
1624 const svalue *
1625 bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
1626 {
1627 if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
1628 return mgr->get_or_create_unknown_svalue (size_type_node);
1629
1630 HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
1631 return mgr->get_or_create_int_cst (size_type_node, num_bytes);
1632 }
1633
1634 /* Implementation of region::get_relative_concrete_offset vfunc for
1635 bit_range_region. */
1636
1637 bool
1638 bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
1639 {
1640 *out = m_bits.get_start_bit_offset ();
1641 return true;
1642 }
1643
1644 /* class var_arg_region : public region. */
1645
1646 void
1647 var_arg_region::dump_to_pp (pretty_printer *pp, bool simple) const
1648 {
1649 if (simple)
1650 {
1651 pp_string (pp, "VAR_ARG_REG(");
1652 get_parent_region ()->dump_to_pp (pp, simple);
1653 pp_printf (pp, ", arg_idx: %d)", m_idx);
1654 }
1655 else
1656 {
1657 pp_string (pp, "var_arg_region(");
1658 get_parent_region ()->dump_to_pp (pp, simple);
1659 pp_printf (pp, ", arg_idx: %d)", m_idx);
1660 }
1661 }
1662
1663 /* Get the frame_region for this var_arg_region. */
1664
1665 const frame_region *
1666 var_arg_region::get_frame_region () const
1667 {
1668 gcc_assert (get_parent_region ());
1669 return as_a <const frame_region *> (get_parent_region ());
1670 }
1671
1672 /* class unknown_region : public region. */
1673
1674 /* Implementation of region::dump_to_pp vfunc for unknown_region. */
1675
1676 void
1677 unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
1678 {
1679 pp_string (pp, "UNKNOWN_REGION");
1680 }
1681
1682 } // namespace ana
1683
1684 #endif /* #if ENABLE_ANALYZER */