gcc/analyzer/region.cc
1 /* Regions of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "diagnostic-core.h"
26 #include "gimple-pretty-print.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
43 #include "tristate.h"
44 #include "bitmap.h"
45 #include "selftest.h"
46 #include "function.h"
47 #include "json.h"
48 #include "analyzer/analyzer.h"
49 #include "analyzer/analyzer-logging.h"
50 #include "ordered-hash-map.h"
51 #include "options.h"
52 #include "cgraph.h"
53 #include "cfg.h"
54 #include "digraph.h"
55 #include "analyzer/supergraph.h"
56 #include "sbitmap.h"
57 #include "analyzer/call-string.h"
58 #include "analyzer/program-point.h"
59 #include "analyzer/store.h"
60 #include "analyzer/region.h"
61 #include "analyzer/region-model.h"
62 #include "analyzer/sm.h"
63 #include "analyzer/program-state.h"
64
65 #if ENABLE_ANALYZER
66
67 namespace ana {
68
69 /* class region and its various subclasses. */
70
71 /* class region. */
72
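/* region's dtor.  Free the cached region_offset, if any.  */
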
73 region::~region ()
74 {
75 delete m_cached_offset;
76 }
77
78 /* Compare REG1 and REG2 by id. */
79
80 int
81 region::cmp_ids (const region *reg1, const region *reg2)
82 {
83 return (long)reg1->get_id () - (long)reg2->get_id ();
84 }
85
86 /* Determine the base region for this region: when considering bindings
87 for this region, the base region is the ancestor which identifies
88 which cluster they should be partitioned into.
89 Regions within the same struct/union/array are in the same cluster.
90 Different decls are in different clusters. */
91
92 const region *
93 region::get_base_region () const
94 {
95 const region *iter = this;
96 while (iter)
97 {
98 switch (iter->get_kind ())
99 {
100 case RK_FIELD:
101 case RK_ELEMENT:
102 case RK_OFFSET:
103 case RK_SIZED:
104 case RK_BIT_RANGE:
105 iter = iter->get_parent_region ();
106 continue;
107 case RK_CAST:
108 iter = iter->dyn_cast_cast_region ()->get_original_region ();
109 continue;
110 default:
111 return iter;
112 }
113 }
114 return iter;
115 }
116
117 /* Return true if get_base_region() == this for this region. */
118
119 bool
120 region::base_region_p () const
121 {
122 switch (get_kind ())
123 {
124 /* Region kinds representing a descendent of a base region. */
125 case RK_FIELD:
126 case RK_ELEMENT:
127 case RK_OFFSET:
128 case RK_SIZED:
129 case RK_CAST:
130 case RK_BIT_RANGE:
131 return false;
132
133 default:
134 return true;
135 }
136 }
137
138 /* Return true if this region is ELDER or one of its descendents. */
139
140 bool
141 region::descendent_of_p (const region *elder) const
142 {
143 const region *iter = this;
144 while (iter)
145 {
146 if (iter == elder)
147 return true;
148 if (iter->get_kind () == RK_CAST)
149 iter = iter->dyn_cast_cast_region ()->get_original_region ();
150 else
151 iter = iter->get_parent_region ();
152 }
153 return false;
154 }
155
156 /* If this region is a frame_region, or a descendent of one, return it.
157 Otherwise return NULL. */
158
159 const frame_region *
160 region::maybe_get_frame_region () const
161 {
162 const region *iter = this;
163 while (iter)
164 {
165 if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
166 return frame_reg;
167 if (iter->get_kind () == RK_CAST)
168 iter = iter->dyn_cast_cast_region ()->get_original_region ();
169 else
170 iter = iter->get_parent_region ();
171 }
172 return NULL;
173 }
174
175 /* Get the memory space of this region. */
176
177 enum memory_space
178 region::get_memory_space () const
179 {
180 const region *iter = this;
181 while (iter)
182 {
183 switch (iter->get_kind ())
184 {
185 default:
186 break;
187 case RK_GLOBALS:
188 return MEMSPACE_GLOBALS;
189 case RK_CODE:
190 case RK_FUNCTION:
191 case RK_LABEL:
192 return MEMSPACE_CODE;
193 case RK_FRAME:
194 case RK_STACK:
195 case RK_ALLOCA:
196 return MEMSPACE_STACK;
197 case RK_HEAP:
198 case RK_HEAP_ALLOCATED:
199 return MEMSPACE_HEAP;
200 case RK_STRING:
201 return MEMSPACE_READONLY_DATA;
202 }
203 if (iter->get_kind () == RK_CAST)
204 iter = iter->dyn_cast_cast_region ()->get_original_region ();
205 else
206 iter = iter->get_parent_region ();
207 }
208 return MEMSPACE_UNKNOWN;
209 }
210
211 /* Subroutine for use by region_model_manager::get_or_create_initial_value.
212 Return true if this region has an initial_svalue.
213 Return false if attempting to use INIT_VAL(this_region) should give
214 the "UNINITIALIZED" poison value. */
215
216 bool
217 region::can_have_initial_svalue_p () const
218 {
219 const region *base_reg = get_base_region ();
220
221 /* Check for memory spaces that are uninitialized by default. */
222 enum memory_space mem_space = base_reg->get_memory_space ();
223 switch (mem_space)
224 {
225 default:
226 gcc_unreachable ();
227 case MEMSPACE_UNKNOWN:
228 case MEMSPACE_CODE:
229 case MEMSPACE_GLOBALS:
230 case MEMSPACE_READONLY_DATA:
231 /* Such regions have initial_svalues. */
232 return true;
233
234 case MEMSPACE_HEAP:
235 /* Heap allocations are uninitialized by default. */
236 return false;
237
238 case MEMSPACE_STACK:
239 if (tree decl = base_reg->maybe_get_decl ())
240 {
241 /* See the assertion in frame_region::get_region_for_local for the
242 tree codes we need to handle here. */
243 switch (TREE_CODE (decl))
244 {
245 default:
246 gcc_unreachable ();
247
248 case PARM_DECL:
249 /* Parameters have initial values. */
250 return true;
251
252 case VAR_DECL:
253 case RESULT_DECL:
254 /* Function locals don't have initial values. */
255 return false;
256
257 case SSA_NAME:
258 {
259 tree ssa_name = decl;
260 /* SSA names that are the default defn of a PARM_DECL
261 have initial_svalues; other SSA names don't. */
262 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
263 && SSA_NAME_VAR (ssa_name)
264 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
265 return true;
266 else
267 return false;
268 }
269 }
270 }
271
272 /* If we have an on-stack region that isn't associated with a decl
273 or SSA name, then we have VLA/alloca, which is uninitialized. */
274 return false;
275 }
276 }
277
278 /* If this region is a decl_region, return the decl.
279 Otherwise return NULL. */
280
281 tree
282 region::maybe_get_decl () const
283 {
284 if (const decl_region *decl_reg = dyn_cast_decl_region ())
285 return decl_reg->get_decl ();
286 return NULL_TREE;
287 }
288
289 /* Get the region_offset for this region (calculating it on the
290 first call and caching it internally). */
291
292 region_offset
293 region::get_offset () const
294 {
295 if (!m_cached_offset)
296 m_cached_offset = new region_offset (calc_offset ());
297 return *m_cached_offset;
298 }
299
300 /* Base class implementation of region::get_byte_size vfunc.
301 If the size of this region (in bytes) is known statically, write it to *OUT
302 and return true.
303 Otherwise return false. */
304
305 bool
306 region::get_byte_size (byte_size_t *out) const
307 {
308 tree type = get_type ();
309
310 /* Bail out e.g. for heap-allocated regions. */
311 if (!type)
312 return false;
313
314 HOST_WIDE_INT bytes = int_size_in_bytes (type);
315 if (bytes == -1)
316 return false;
317 *out = bytes;
318 return true;
319 }
320
321 /* Base implementation of region::get_byte_size_sval vfunc. */
322
323 const svalue *
324 region::get_byte_size_sval (region_model_manager *mgr) const
325 {
326 tree type = get_type ();
327
328 /* Bail out e.g. for heap-allocated regions. */
329 if (!type)
330 return mgr->get_or_create_unknown_svalue (size_type_node);
331
332 HOST_WIDE_INT bytes = int_size_in_bytes (type);
333 if (bytes == -1)
334 return mgr->get_or_create_unknown_svalue (size_type_node);
335
336 tree byte_size = size_in_bytes (type);
337 if (TREE_TYPE (byte_size) != size_type_node)
338 byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
339 return mgr->get_or_create_constant_svalue (byte_size);
340 }
341
342 /* Attempt to get the size of TYPE in bits.
343 If successful, return true and write the size to *OUT.
344 Otherwise return false. */
345
346 bool
347 int_size_in_bits (const_tree type, bit_size_t *out)
348 {
349 if (INTEGRAL_TYPE_P (type))
350 {
351 *out = TYPE_PRECISION (type);
352 return true;
353 }
354
355 tree sz = TYPE_SIZE (type);
356 if (sz && tree_fits_uhwi_p (sz))
357 {
358 *out = TREE_INT_CST_LOW (sz);
359 return true;
360 }
361 else
362 return false;
363 }
364
365 /* If the size of this region (in bits) is known statically, write it to *OUT
366 and return true.
367 Otherwise return false. */
368
369 bool
370 region::get_bit_size (bit_size_t *out) const
371 {
372 tree type = get_type ();
373
374 /* Bail out e.g. for heap-allocated regions. */
375 if (!type)
376 return false;
377
378 return int_size_in_bits (type, out);
379 }
380
381 /* Get the field within RECORD_TYPE at BIT_OFFSET. */
382
383 tree
384 get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
385 {
386 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
387 if (bit_offset < 0)
388 return NULL;
389
390 /* Find the first field that has an offset > BIT_OFFSET,
391 then return the one preceding it.
392 Skip other trees within the chain, such as FUNCTION_DECLs. */
393 tree last_field = NULL_TREE;
394 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
395 iter = DECL_CHAIN (iter))
396 {
397 if (TREE_CODE (iter) == FIELD_DECL)
398 {
399 int iter_field_offset = int_bit_position (iter);
400 if (bit_offset < iter_field_offset)
401 return last_field;
402 last_field = iter;
403 }
404 }
405 return last_field;
406 }
407
408 /* Populate *OUT with descendent regions of type TYPE that match
409 RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region. */
410
411 void
412 region::get_subregions_for_binding (region_model_manager *mgr,
413 bit_offset_t relative_bit_offset,
414 bit_size_t size_in_bits,
415 tree type,
416 auto_vec <const region *> *out) const
417 {
418 if (get_type () == NULL_TREE || type == NULL_TREE)
419 return;
420 if (relative_bit_offset == 0
421 && types_compatible_p (get_type (), type))
422 {
423 out->safe_push (this);
424 return;
425 }
426 switch (TREE_CODE (get_type ()))
427 {
428 case ARRAY_TYPE:
429 {
430 tree element_type = TREE_TYPE (get_type ());
431 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
432 if (hwi_byte_size > 0)
433 {
434 HOST_WIDE_INT bits_per_element
435 = hwi_byte_size << LOG2_BITS_PER_UNIT;
436 HOST_WIDE_INT element_index
437 = (relative_bit_offset.to_shwi () / bits_per_element);
438 tree element_index_cst
439 = build_int_cst (integer_type_node, element_index);
440 HOST_WIDE_INT inner_bit_offset
441 = relative_bit_offset.to_shwi () % bits_per_element;
442 const region *subregion = mgr->get_element_region
443 (this, element_type,
444 mgr->get_or_create_constant_svalue (element_index_cst));
445 subregion->get_subregions_for_binding (mgr, inner_bit_offset,
446 size_in_bits, type, out);
447 }
448 }
449 break;
450 case RECORD_TYPE:
451 {
452 /* The bit offset might be *within* one of the fields (such as
453 with nested structs).
454 So we want to find the enclosing field, adjust the offset,
455 and repeat. */
456 if (tree field = get_field_at_bit_offset (get_type (),
457 relative_bit_offset))
458 {
459 int field_bit_offset = int_bit_position (field);
460 const region *subregion = mgr->get_field_region (this, field);
461 subregion->get_subregions_for_binding
462 (mgr, relative_bit_offset - field_bit_offset,
463 size_in_bits, type, out);
464 }
465 }
466 break;
467 case UNION_TYPE:
468 {
469 for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
470 field = DECL_CHAIN (field))
471 {
472 if (TREE_CODE (field) != FIELD_DECL)
473 continue;
474 const region *subregion = mgr->get_field_region (this, field);
475 subregion->get_subregions_for_binding (mgr,
476 relative_bit_offset,
477 size_in_bits,
478 type,
479 out);
480 }
481 }
482 break;
483 default:
484 /* Do nothing. */
485 break;
486 }
487 }
488
489 /* Walk from this region up to the base region within its cluster, calculating
490 the offset relative to the base region, either as an offset in bits,
491 or a symbolic offset. */
492
493 region_offset
494 region::calc_offset () const
495 {
496 const region *iter_region = this;
497 bit_offset_t accum_bit_offset = 0;
498
499 while (iter_region)
500 {
501 switch (iter_region->get_kind ())
502 {
503 case RK_FIELD:
504 case RK_ELEMENT:
505 case RK_OFFSET:
506 case RK_BIT_RANGE:
507 {
508 bit_offset_t rel_bit_offset;
509 if (!iter_region->get_relative_concrete_offset (&rel_bit_offset))
510 return region_offset::make_symbolic
511 (iter_region->get_parent_region ());
512 accum_bit_offset += rel_bit_offset;
513 iter_region = iter_region->get_parent_region ();
514 }
515 continue;
516
517 case RK_SIZED:
518 iter_region = iter_region->get_parent_region ();
519 continue;
520
521 case RK_CAST:
522 {
523 const cast_region *cast_reg
524 = as_a <const cast_region *> (iter_region);
525 iter_region = cast_reg->get_original_region ();
526 }
527 continue;
528
529 default:
530 return region_offset::make_concrete (iter_region, accum_bit_offset);
531 }
532 }
533 return region_offset::make_concrete (iter_region, accum_bit_offset);
534 }
535
536 /* Base implementation of region::get_relative_concrete_offset vfunc. */
537
538 bool
539 region::get_relative_concrete_offset (bit_offset_t *) const
540 {
541 return false;
542 }
543
544 /* Attempt to get the position and size of this region expressed as a
545 concrete range of bytes relative to its parent.
546 If successful, return true and write to *OUT.
547 Otherwise return false. */
548
549 bool
550 region::get_relative_concrete_byte_range (byte_range *out) const
551 {
552 /* We must have a concrete offset relative to the parent. */
553 bit_offset_t rel_bit_offset;
554 if (!get_relative_concrete_offset (&rel_bit_offset))
555 return false;
556 /* ...which must be a whole number of bytes. */
557 if (rel_bit_offset % BITS_PER_UNIT != 0)
558 return false;
559 byte_offset_t start_byte_offset = rel_bit_offset / BITS_PER_UNIT;
560
561 /* We must have a concrete size, which must be a whole number
562 of bytes. */
563 byte_size_t num_bytes;
564 if (!get_byte_size (&num_bytes))
565 return false;
566
567 /* Success. */
568 *out = byte_range (start_byte_offset, num_bytes);
569 return true;
570 }
571
572 /* Dump a description of this region to stderr. */
573
574 DEBUG_FUNCTION void
575 region::dump (bool simple) const
576 {
577 pretty_printer pp;
578 pp_format_decoder (&pp) = default_tree_printer;
579 pp_show_color (&pp) = pp_show_color (global_dc->printer);
580 pp.buffer->stream = stderr;
581 dump_to_pp (&pp, simple);
582 pp_newline (&pp);
583 pp_flush (&pp);
584 }
585
586 /* Return a new json::string describing the region. */
587
588 json::value *
589 region::to_json () const
590 {
591 label_text desc = get_desc (true);
592 json::value *reg_js = new json::string (desc.m_buffer);
593 desc.maybe_free ();
594 return reg_js;
595 }
596
597 /* Generate a description of this region. */
598
599 DEBUG_FUNCTION label_text
600 region::get_desc (bool simple) const
601 {
602 pretty_printer pp;
603 pp_format_decoder (&pp) = default_tree_printer;
604 dump_to_pp (&pp, simple);
605 return label_text::take (xstrdup (pp_formatted_text (&pp)));
606 }
607
608 /* Base implementation of region::accept vfunc.
609 Subclass implementations should chain up to this. */
610
611 void
612 region::accept (visitor *v) const
613 {
614 v->visit_region (this);
615 if (m_parent)
616 m_parent->accept (v);
617 }
618
619 /* Return true if this is a symbolic region for dereferencing an
620 unknown ptr.
621 We shouldn't attempt to bind values for this region (but
622 can unbind values for other regions). */
623
624 bool
625 region::symbolic_for_unknown_ptr_p () const
626 {
627 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
628 if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
629 return true;
630 return false;
631 }
632
633 /* Return true if this is a region for a decl with name DECL_NAME.
634 Intended for use when debugging (for assertions and conditional
635 breakpoints). */
636
637 DEBUG_FUNCTION bool
638 region::is_named_decl_p (const char *decl_name) const
639 {
640 if (tree decl = maybe_get_decl ())
641 if (DECL_NAME (decl)
642 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
643 return true;
644 return false;
645 }
646
647 /* region's ctor. */
648
649 region::region (complexity c, unsigned id, const region *parent, tree type)
650 : m_complexity (c), m_id (id), m_parent (parent), m_type (type),
651 m_cached_offset (NULL)
652 {
653 gcc_assert (type == NULL_TREE || TYPE_P (type));
654 }
655
656 /* Comparator for use by vec<const region *>::qsort,
657 using their IDs to order them. */
658
659 int
660 region::cmp_ptr_ptr (const void *p1, const void *p2)
661 {
662 const region * const *reg1 = (const region * const *)p1;
663 const region * const *reg2 = (const region * const *)p2;
664
665 return cmp_ids (*reg1, *reg2);
666 }
667
668 /* Determine if a pointer to this region must be non-NULL.
669
670 Generally, pointers to regions must be non-NULL, but pointers
671 to symbolic_regions might, in fact, be NULL.
672
673 This allows us to simulate functions like malloc and calloc with:
674 - only one "outcome" from each statement,
675 - the idea that the pointer is on the heap if non-NULL,
676 - the possibility that the pointer could be NULL,
677 - the idea that successive values returned from malloc are non-equal,
678 - to be able to zero-fill for calloc. */
679
680 bool
681 region::non_null_p () const
682 {
683 switch (get_kind ())
684 {
685 default:
686 return true;
687 case RK_SYMBOLIC:
688 /* Are we within a symbolic_region? If so, it could be NULL, and we
689 have to fall back on the constraints. */
690 return false;
691 case RK_HEAP_ALLOCATED:
692 return false;
693 }
694 }
695
696 /* Return true iff this region is defined in terms of SVAL. */
697
698 bool
699 region::involves_p (const svalue *sval) const
700 {
701 if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
702 {
703 if (symbolic_reg->get_pointer ()->involves_p (sval))
704 return true;
705 }
706
707 return false;
708 }
709
710 /* Comparator for trees to impose a deterministic ordering on
711 T1 and T2. */
712
713 static int
714 tree_cmp (const_tree t1, const_tree t2)
715 {
716 gcc_assert (t1);
717 gcc_assert (t2);
718
719 /* Test tree codes first. */
720 if (TREE_CODE (t1) != TREE_CODE (t2))
721 return TREE_CODE (t1) - TREE_CODE (t2);
722
723 /* From this point on, we know T1 and T2 have the same tree code. */
724
725 if (DECL_P (t1))
726 {
727 if (DECL_NAME (t1) && DECL_NAME (t2))
728 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
729 IDENTIFIER_POINTER (DECL_NAME (t2)));
730 else
731 {
732 if (DECL_NAME (t1))
733 return -1;
734 else if (DECL_NAME (t2))
735 return 1;
736 else
737 return DECL_UID (t1) - DECL_UID (t2);
738 }
739 }
740
741 switch (TREE_CODE (t1))
742 {
743 case SSA_NAME:
744 {
745 if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
746 {
747 int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
748 if (var_cmp)
749 return var_cmp;
750 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
751 }
752 else
753 {
754 if (SSA_NAME_VAR (t1))
755 return -1;
756 else if (SSA_NAME_VAR (t2))
757 return 1;
758 else
759 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
760 }
761 }
762 break;
763
764 case INTEGER_CST:
765 return tree_int_cst_compare (t1, t2);
766
767 case REAL_CST:
768 {
769 const real_value *rv1 = TREE_REAL_CST_PTR (t1);
770 const real_value *rv2 = TREE_REAL_CST_PTR (t2);
771 if (real_compare (UNORDERED_EXPR, rv1, rv2))
772 {
773 /* Impose an arbitrary order on NaNs relative to other NaNs
774 and to non-NaNs. */
775 if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
776 return cmp_isnan;
777 if (int cmp_issignaling_nan
778 = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
779 return cmp_issignaling_nan;
780 return real_isneg (rv1) - real_isneg (rv2);
781 }
782 if (real_compare (LT_EXPR, rv1, rv2))
783 return -1;
784 if (real_compare (GT_EXPR, rv1, rv2))
785 return 1;
786 return 0;
787 }
788
789 case STRING_CST:
790 return strcmp (TREE_STRING_POINTER (t1),
791 TREE_STRING_POINTER (t2));
792
793 default:
794 gcc_unreachable ();
795 break;
796 }
797
798 gcc_unreachable ();
799
800 return 0;
801 }
802
803 /* qsort comparator for trees to impose a deterministic ordering on
804 P1 and P2. */
805
806 int
807 tree_cmp (const void *p1, const void *p2)
808 {
809 const_tree t1 = *(const_tree const *)p1;
810 const_tree t2 = *(const_tree const *)p2;
811
812 return tree_cmp (t1, t2);
813 }
814
815 /* class frame_region : public space_region. */
816
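/* frame_region's dtor.  Delete the decl_regions cached in m_locals.  */
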
817 frame_region::~frame_region ()
818 {
819 for (map_t::iterator iter = m_locals.begin ();
820 iter != m_locals.end ();
821 ++iter)
822 delete (*iter).second;
823 }
824
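/* Implementation of region::accept vfunc for frame_region: chain up
   to region::accept, then visit the calling frame, if any.  */
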
825 void
826 frame_region::accept (visitor *v) const
827 {
828 region::accept (v);
829 if (m_calling_frame)
830 m_calling_frame->accept (v);
831 }
832
833 /* Implementation of region::dump_to_pp vfunc for frame_region. */
834
835 void
836 frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
837 {
838 if (simple)
839 pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ());
840 else
841 pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
842 function_name (m_fun), m_index, get_stack_depth ());
843 }
844
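/* Get the decl_region for local EXPR (a PARM_DECL, VAR_DECL, RESULT_DECL,
   or SSA name within this frame's function), creating and caching it in
   m_locals if it isn't there already.
   When checking is enabled, assert that EXPR really does belong to this
   frame's function.  */
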
845 const decl_region *
846 frame_region::get_region_for_local (region_model_manager *mgr,
847 tree expr,
848 const region_model_context *ctxt) const
849 {
850 if (CHECKING_P)
851 {
852 /* Verify that EXPR is a local or SSA name, and that it's for the
853 correct function for this stack frame. */
854 gcc_assert (TREE_CODE (expr) == PARM_DECL
855 || TREE_CODE (expr) == VAR_DECL
856 || TREE_CODE (expr) == SSA_NAME
857 || TREE_CODE (expr) == RESULT_DECL);
858 switch (TREE_CODE (expr))
859 {
860 default:
861 gcc_unreachable ();
862 case VAR_DECL:
863 gcc_assert (!is_global_var (expr));
864 /* Fall through. */
865 case PARM_DECL:
866 case RESULT_DECL:
867 gcc_assert (DECL_CONTEXT (expr) == m_fun->decl);
868 break;
869 case SSA_NAME:
870 {
871 if (tree var = SSA_NAME_VAR (expr))
872 {
873 if (DECL_P (var))
874 gcc_assert (DECL_CONTEXT (var) == m_fun->decl);
875 }
876 else if (ctxt)
877 if (const extrinsic_state *ext_state = ctxt->get_ext_state ())
878 if (const supergraph *sg
879 = ext_state->get_engine ()->get_supergraph ())
880 {
881 const gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
882 const supernode *snode
883 = sg->get_supernode_for_stmt (def_stmt);
884 gcc_assert (snode->get_function () == m_fun);
885 }
886 }
887 break;
888 }
889 }
890
891 /* Ideally we'd use mutable here. */
892 map_t &mutable_locals = const_cast <map_t &> (m_locals);
893
894 if (decl_region **slot = mutable_locals.get (expr))
895 return *slot;
896 decl_region *reg
897 = new decl_region (mgr->alloc_region_id (), this, expr);
898 mutable_locals.put (expr, reg);
899 return reg;
900 }
901
902 /* class globals_region : public space_region. */
903
904 /* Implementation of region::dump_to_pp vfunc for globals_region. */
905
906 void
907 globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
908 {
909 if (simple)
910 pp_string (pp, "::");
911 else
912 pp_string (pp, "globals");
913 }
914
915 /* class code_region : public map_region. */
916
917 /* Implementation of region::dump_to_pp vfunc for code_region. */
918
919 void
920 code_region::dump_to_pp (pretty_printer *pp, bool simple) const
921 {
922 if (simple)
923 pp_string (pp, "code region");
924 else
925 pp_string (pp, "code_region()");
926 }
927
928 /* class function_region : public region. */
929
930 /* Implementation of region::dump_to_pp vfunc for function_region. */
931
932 void
933 function_region::dump_to_pp (pretty_printer *pp, bool simple) const
934 {
935 if (simple)
936 {
937 dump_quoted_tree (pp, m_fndecl);
938 }
939 else
940 {
941 pp_string (pp, "function_region(");
942 dump_quoted_tree (pp, m_fndecl);
943 pp_string (pp, ")");
944 }
945 }
946
947 /* class label_region : public region. */
948
949 /* Implementation of region::dump_to_pp vfunc for label_region. */
950
951 void
952 label_region::dump_to_pp (pretty_printer *pp, bool simple) const
953 {
954 if (simple)
955 {
956 dump_quoted_tree (pp, m_label);
957 }
958 else
959 {
960 pp_string (pp, "label_region(");
961 dump_quoted_tree (pp, m_label);
962 pp_string (pp, ")");
963 }
964 }
965
966 /* class stack_region : public region. */
967
968 /* Implementation of region::dump_to_pp vfunc for stack_region. */
969
970 void
971 stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
972 {
973 if (simple)
974 pp_string (pp, "stack region");
975 else
976 pp_string (pp, "stack_region()");
977 }
978
979 /* class heap_region : public region. */
980
981 /* Implementation of region::dump_to_pp vfunc for heap_region. */
982
983 void
984 heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
985 {
986 if (simple)
987 pp_string (pp, "heap region");
988 else
989 pp_string (pp, "heap_region()");
990 }
991
992 /* class root_region : public region. */
993
994 /* root_region's ctor. */
995
996 root_region::root_region (unsigned id)
997 : region (complexity (1, 1), id, NULL, NULL_TREE)
998 {
999 }
1000
1001 /* Implementation of region::dump_to_pp vfunc for root_region. */
1002
1003 void
1004 root_region::dump_to_pp (pretty_printer *pp, bool simple) const
1005 {
1006 if (simple)
1007 pp_string (pp, "root region");
1008 else
1009 pp_string (pp, "root_region()");
1010 }
1011
1012 /* class symbolic_region : public map_region. */
1013
1014 /* symbolic_region's ctor. */
1015
1016 symbolic_region::symbolic_region (unsigned id, region *parent,
1017 const svalue *sval_ptr)
1018 : region (complexity::from_pair (parent, sval_ptr), id, parent,
1019 (sval_ptr->get_type ()
1020 ? TREE_TYPE (sval_ptr->get_type ())
1021 : NULL_TREE)),
1022 m_sval_ptr (sval_ptr)
1023 {
1024 }
1025
1026 /* Implementation of region::accept vfunc for symbolic_region. */
1027
1028 void
1029 symbolic_region::accept (visitor *v) const
1030 {
1031 region::accept (v);
1032 m_sval_ptr->accept (v);
1033 }
1034
1035 /* Implementation of region::dump_to_pp vfunc for symbolic_region. */
1036
1037 void
1038 symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
1039 {
1040 if (simple)
1041 {
1042 pp_string (pp, "(*");
1043 m_sval_ptr->dump_to_pp (pp, simple);
1044 pp_string (pp, ")");
1045 }
1046 else
1047 {
1048 pp_string (pp, "symbolic_region(");
1049 get_parent_region ()->dump_to_pp (pp, simple);
1050 if (get_type ())
1051 {
1052 pp_string (pp, ", ");
1053 print_quoted_type (pp, get_type ());
1054 }
1055 pp_string (pp, ", ");
1056 m_sval_ptr->dump_to_pp (pp, simple);
1057 pp_string (pp, ")");
1058 }
1059 }
1060
1061 /* class decl_region : public region. */
1062
1063 /* Implementation of region::dump_to_pp vfunc for decl_region. */
1064
1065 void
1066 decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
1067 {
1068 if (simple)
1069 pp_printf (pp, "%E", m_decl);
1070 else
1071 {
1072 pp_string (pp, "decl_region(");
1073 get_parent_region ()->dump_to_pp (pp, simple);
1074 pp_string (pp, ", ");
1075 print_quoted_type (pp, get_type ());
1076 pp_printf (pp, ", %qE)", m_decl);
1077 }
1078 }
1079
1080 /* Get the stack depth for the frame containing this decl, or 0
1081 for a global. */
1082
1083 int
1084 decl_region::get_stack_depth () const
1085 {
1086 if (get_parent_region () == NULL)
1087 return 0;
1088 if (const frame_region *frame_reg
1089 = get_parent_region ()->dyn_cast_frame_region ())
1090 return frame_reg->get_stack_depth ();
1091 return 0;
1092 }
1093
1094 /* If the underlying decl is in the global constant pool,
1095 return an svalue representing the constant value.
1096 Otherwise return NULL. */
1097
1098 const svalue *
1099 decl_region::maybe_get_constant_value (region_model_manager *mgr) const
1100 {
1101 if (TREE_CODE (m_decl) == VAR_DECL
1102 && DECL_IN_CONSTANT_POOL (m_decl)
1103 && DECL_INITIAL (m_decl)
1104 && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
1105 return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
1106 return NULL;
1107 }
1108
1109 /* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl. */
1110
1111 const svalue *
1112 decl_region::get_svalue_for_constructor (tree ctor,
1113 region_model_manager *mgr) const
1114 {
1115 gcc_assert (!TREE_CLOBBER_P (ctor));
1116
1117 /* Create a binding map, applying ctor to it, using this
1118 decl_region as the base region when building child regions
1119 for offset calculations. */
1120 binding_map map;
1121 if (!map.apply_ctor_to_region (this, ctor, mgr))
1122 return mgr->get_or_create_unknown_svalue (get_type ());
1123
1124 /* Return a compound svalue for the map we built. */
1125 return mgr->get_or_create_compound_svalue (get_type (), map);
1126 }
1127
1128 /* For use on decl_regions for global variables.
1129
1130 Get an svalue for the initial value of this region at entry to
1131 "main" (either based on DECL_INITIAL, or implicit initialization to
1132 zero).
1133
1134 Return NULL if there is a problem. */
1135
1136 const svalue *
1137 decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
1138 {
1139 tree init = DECL_INITIAL (m_decl);
1140 if (!init)
1141 {
1142 /* If we have an "extern" decl then there may be an initializer in
1143 another TU. */
1144 if (DECL_EXTERNAL (m_decl))
1145 return NULL;
1146
1147 /* Implicit initialization to zero; use a compound_svalue for it.
1148 Doing so requires that we have a concrete binding for this region,
1149 which can fail if we have a region with unknown size
1150 (e.g. "extern const char arr[];"). */
1151 const binding_key *binding
1152 = binding_key::make (mgr->get_store_manager (), this);
1153 if (binding->symbolic_p ())
1154 return NULL;
1155
1156 binding_cluster c (this);
1157 c.zero_fill_region (mgr->get_store_manager (), this);
1158 return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
1159 c.get_map ());
1160 }
1161
1162 /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
1163 values (to avoid writing out an extra section). */
1164 if (init == error_mark_node)
1165 return NULL;
1166
1167 if (TREE_CODE (init) == CONSTRUCTOR)
1168 return get_svalue_for_constructor (init, mgr);
1169
1170 /* Reuse the get_rvalue logic from region_model. */
1171 region_model m (mgr);
1172 return m.get_rvalue (path_var (init, 0), NULL);
1173 }
1174
1175 /* Subroutine of symnode_requires_tracking_p; return true if REF
1176 might imply that we should be tracking the value of its decl. */
1177
1178 static bool
1179 ipa_ref_requires_tracking (ipa_ref *ref)
1180 {
1181 /* If we have a load/store/alias of the symbol, then we'll track
1182 the decl's value. */
1183 if (ref->use != IPA_REF_ADDR)
1184 return true;
1185
1186 if (ref->stmt == NULL)
1187 return true;
1188
1189 switch (ref->stmt->code)
1190 {
1191 default:
1192 return true;
1193 case GIMPLE_CALL:
1194 {
1195 cgraph_node *caller_cnode = dyn_cast <cgraph_node *> (ref->referring);
1196 if (caller_cnode == NULL)
1197 return true;
1198 cgraph_edge *edge = caller_cnode->get_edge (ref->stmt);
1199 if (!edge)
1200 return true;
1201 if (edge->callee == NULL)
1202 return true; /* e.g. call through function ptr. */
1203 if (edge->callee->definition)
1204 return true;
1205 /* If we get here, then this ref is a pointer passed to
1206 a function we don't have the definition for. */
1207 return false;
1208 }
1209 break;
1210 case GIMPLE_ASM:
1211 {
1212 const gasm *asm_stmt = as_a <const gasm *> (ref->stmt);
1213 if (gimple_asm_noutputs (asm_stmt) > 0)
1214 return true;
1215 if (gimple_asm_nclobbers (asm_stmt) > 0)
1216 return true;
1217 /* If we get here, then this ref is the decl being passed
1218 by pointer to asm with no outputs. */
1219 return false;
1220 }
1221 break;
1222 }
1223 }
1224
1225 /* Determine if the decl for SYMNODE should have binding_clusters
1226 in our state objects; return false if we can skip tracking it,
1227 as an optimization. */
1228
1229 static bool
1230 symnode_requires_tracking_p (symtab_node *symnode)
1231 {
1232 gcc_assert (symnode);
1233 if (symnode->externally_visible)
1234 return true;
1235 tree context_fndecl = DECL_CONTEXT (symnode->decl);
1236 if (context_fndecl == NULL)
1237 return true;
1238 if (TREE_CODE (context_fndecl) != FUNCTION_DECL)
1239 return true;
1240 for (auto ref : symnode->ref_list.referring)
1241 if (ipa_ref_requires_tracking (ref))
1242 return true;
1243
1244 /* If we get here, then we don't have uses of this decl that require
1245 tracking; we never read from it or write to it explicitly. */
1246 return false;
1247 }
1248
1249 /* Subroutine of decl_region ctor: determine whether this decl_region
1250 can have binding_clusters; return false if we can skip tracking
1251 this decl in our state objects, as an optimization. */
1252
1253 bool
1254 decl_region::calc_tracked_p (tree decl)
1255 {
1256 /* Precondition of symtab_node::get. */
1257 if (TREE_CODE (decl) == VAR_DECL
1258 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) || in_lto_p))
1259 if (symtab_node *symnode = symtab_node::get (decl))
1260 return symnode_requires_tracking_p (symnode);
1261 return true;
1262 }
1263
1264 /* class field_region : public region. */
1265
1266 /* Implementation of region::dump_to_pp vfunc for field_region. */
1267
1268 void
1269 field_region::dump_to_pp (pretty_printer *pp, bool simple) const
1270 {
1271 if (simple)
1272 {
1273 get_parent_region ()->dump_to_pp (pp, simple);
1274 pp_string (pp, ".");
1275 pp_printf (pp, "%E", m_field);
1276 }
1277 else
1278 {
1279 pp_string (pp, "field_region(");
1280 get_parent_region ()->dump_to_pp (pp, simple);
1281 pp_string (pp, ", ");
1282 print_quoted_type (pp, get_type ());
1283 pp_printf (pp, ", %qE)", m_field);
1284 }
1285 }
1286
1287 /* Implementation of region::get_relative_concrete_offset vfunc
1288 for field_region. */
1289
1290 bool
1291 field_region::get_relative_concrete_offset (bit_offset_t *out) const
1292 {
1293 /* Compare with e.g. gimple-fold.cc's
1294 fold_nonarray_ctor_reference. */
1295 tree byte_offset = DECL_FIELD_OFFSET (m_field);
1296 if (TREE_CODE (byte_offset) != INTEGER_CST)
1297 return false;
1298 tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
1299 /* Compute bit offset of the field. */
1300 offset_int bitoffset
1301 = (wi::to_offset (field_offset)
1302 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
1303 *out = bitoffset;
1304 return true;
1305 }
1306
1307 /* class element_region : public region. */
1308
1309 /* Implementation of region::accept vfunc for element_region. */
1310
1311 void
1312 element_region::accept (visitor *v) const
1313 {
1314 region::accept (v);
1315 m_index->accept (v);
1316 }
1317
1318 /* Implementation of region::dump_to_pp vfunc for element_region. */
1319
1320 void
1321 element_region::dump_to_pp (pretty_printer *pp, bool simple) const
1322 {
1323 if (simple)
1324 {
1325 //pp_string (pp, "(");
1326 get_parent_region ()->dump_to_pp (pp, simple);
1327 pp_string (pp, "[");
1328 m_index->dump_to_pp (pp, simple);
1329 pp_string (pp, "]");
1330 //pp_string (pp, ")");
1331 }
1332 else
1333 {
1334 pp_string (pp, "element_region(");
1335 get_parent_region ()->dump_to_pp (pp, simple);
1336 pp_string (pp, ", ");
1337 print_quoted_type (pp, get_type ());
1338 pp_string (pp, ", ");
1339 m_index->dump_to_pp (pp, simple);
1340 pp_printf (pp, ")");
1341 }
1342 }
1343
1344 /* Implementation of region::get_relative_concrete_offset vfunc
1345 for element_region. */
1346
1347 bool
1348 element_region::get_relative_concrete_offset (bit_offset_t *out) const
1349 {
1350 if (tree idx_cst = m_index->maybe_get_constant ())
1351 {
1352 gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);
1353
1354 tree elem_type = get_type ();
1355 offset_int element_idx = wi::to_offset (idx_cst);
1356
1357 /* First, use int_size_in_bytes, to reject the case where we
1358 have an incomplete type, or a non-constant value. */
1359 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1360 if (hwi_byte_size > 0)
1361 {
1362 offset_int element_bit_size
1363 = hwi_byte_size << LOG2_BITS_PER_UNIT;
1364 offset_int element_bit_offset
1365 = element_idx * element_bit_size;
1366 *out = element_bit_offset;
1367 return true;
1368 }
1369 }
1370 return false;
1371 }
1372
1373 /* class offset_region : public region. */
1374
1375 /* Implementation of region::accept vfunc for offset_region. */
1376
1377 void
1378 offset_region::accept (visitor *v) const
1379 {
1380 region::accept (v);
1381 m_byte_offset->accept (v);
1382 }
1383
1384 /* Implementation of region::dump_to_pp vfunc for offset_region. */
1385
1386 void
1387 offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
1388 {
1389 if (simple)
1390 {
1391 //pp_string (pp, "(");
1392 get_parent_region ()->dump_to_pp (pp, simple);
1393 pp_string (pp, "+");
1394 m_byte_offset->dump_to_pp (pp, simple);
1395 //pp_string (pp, ")");
1396 }
1397 else
1398 {
1399 pp_string (pp, "offset_region(");
1400 get_parent_region ()->dump_to_pp (pp, simple);
1401 pp_string (pp, ", ");
1402 print_quoted_type (pp, get_type ());
1403 pp_string (pp, ", ");
1404 m_byte_offset->dump_to_pp (pp, simple);
1405 pp_printf (pp, ")");
1406 }
1407 }
1408
1409 /* Implementation of region::get_relative_concrete_offset vfunc
1410 for offset_region. */
1411
1412 bool
1413 offset_region::get_relative_concrete_offset (bit_offset_t *out) const
1414 {
1415 if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
1416 {
1417 gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1418 /* Use a signed value for the byte offset, to handle
1419 negative offsets. */
1420 HOST_WIDE_INT byte_offset
1421 = wi::to_offset (byte_offset_cst).to_shwi ();
1422 HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
1423 *out = bit_offset;
1424 return true;
1425 }
1426 return false;
1427 }
1428
1429 /* class sized_region : public region. */
1430
1431 /* Implementation of region::accept vfunc for sized_region. */
1432
1433 void
1434 sized_region::accept (visitor *v) const
1435 {
1436 region::accept (v);
1437 m_byte_size_sval->accept (v);
1438 }
1439
1440 /* Implementation of region::dump_to_pp vfunc for sized_region. */
1441
1442 void
1443 sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
1444 {
1445 if (simple)
1446 {
1447 pp_string (pp, "SIZED_REG(");
1448 get_parent_region ()->dump_to_pp (pp, simple);
1449 pp_string (pp, ", ");
1450 m_byte_size_sval->dump_to_pp (pp, simple);
1451 pp_string (pp, ")");
1452 }
1453 else
1454 {
1455 pp_string (pp, "sized_region(");
1456 get_parent_region ()->dump_to_pp (pp, simple);
1457 pp_string (pp, ", ");
1458 m_byte_size_sval->dump_to_pp (pp, simple);
1459 pp_printf (pp, ")");
1460 }
1461 }
1462
1463 /* Implementation of region::get_byte_size vfunc for sized_region. */
1464
1465 bool
1466 sized_region::get_byte_size (byte_size_t *out) const
1467 {
1468 if (tree cst = m_byte_size_sval->maybe_get_constant ())
1469 {
1470 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1471 *out = tree_to_uhwi (cst);
1472 return true;
1473 }
1474 return false;
1475 }
1476
1477 /* Implementation of region::get_bit_size vfunc for sized_region. */
1478
1479 bool
1480 sized_region::get_bit_size (bit_size_t *out) const
1481 {
1482 byte_size_t byte_size;
1483 if (!get_byte_size (&byte_size))
1484 return false;
1485 *out = byte_size * BITS_PER_UNIT;
1486 return true;
1487 }
1488
1489 /* class cast_region : public region. */
1490
1491 /* Implementation of region::accept vfunc for cast_region. */
1492
1493 void
1494 cast_region::accept (visitor *v) const
1495 {
1496 region::accept (v);
1497 m_original_region->accept (v);
1498 }
1499
1500 /* Implementation of region::dump_to_pp vfunc for cast_region. */
1501
1502 void
1503 cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
1504 {
1505 if (simple)
1506 {
1507 pp_string (pp, "CAST_REG(");
1508 print_quoted_type (pp, get_type ());
1509 pp_string (pp, ", ");
1510 m_original_region->dump_to_pp (pp, simple);
1511 pp_string (pp, ")");
1512 }
1513 else
1514 {
1515 pp_string (pp, "cast_region(");
1516 m_original_region->dump_to_pp (pp, simple);
1517 pp_string (pp, ", ");
1518 print_quoted_type (pp, get_type ());
1519 pp_printf (pp, ")");
1520 }
1521 }
1522
1523 /* class heap_allocated_region : public region. */
1524
1525 /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */
1526
1527 void
1528 heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
1529 {
1530 if (simple)
1531 pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
1532 else
1533 pp_printf (pp, "heap_allocated_region(%i)", get_id ());
1534 }
1535
1536 /* class alloca_region : public region. */
1537
1538 /* Implementation of region::dump_to_pp vfunc for alloca_region. */
1539
1540 void
1541 alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
1542 {
1543 if (simple)
1544 pp_printf (pp, "ALLOCA_REGION(%i)", get_id ());
1545 else
1546 pp_printf (pp, "alloca_region(%i)", get_id ());
1547 }
1548
1549 /* class string_region : public region. */
1550
1551 /* Implementation of region::dump_to_pp vfunc for string_region. */
1552
1553 void
1554 string_region::dump_to_pp (pretty_printer *pp, bool simple) const
1555 {
1556 if (simple)
1557 dump_tree (pp, m_string_cst);
1558 else
1559 {
1560 pp_string (pp, "string_region(");
1561 dump_tree (pp, m_string_cst);
1562 if (!flag_dump_noaddr)
1563 {
1564 pp_string (pp, " (");
1565 pp_pointer (pp, m_string_cst);
1566 pp_string (pp, "))");
1567 }
1568 }
1569 }
1570
1571 /* class bit_range_region : public region. */
1572
1573 /* Implementation of region::dump_to_pp vfunc for bit_range_region. */
1574
1575 void
1576 bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
1577 {
1578 if (simple)
1579 {
1580 pp_string (pp, "BIT_RANGE_REG(");
1581 get_parent_region ()->dump_to_pp (pp, simple);
1582 pp_string (pp, ", ");
1583 m_bits.dump_to_pp (pp);
1584 pp_string (pp, ")");
1585 }
1586 else
1587 {
1588 pp_string (pp, "bit_range_region(");
1589 get_parent_region ()->dump_to_pp (pp, simple);
1590 pp_string (pp, ", ");
1591 m_bits.dump_to_pp (pp);
1592 pp_printf (pp, ")");
1593 }
1594 }
1595
1596 /* Implementation of region::get_byte_size vfunc for bit_range_region. */
1597
1598 bool
1599 bit_range_region::get_byte_size (byte_size_t *out) const
1600 {
1601 if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
1602 {
1603 *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
1604 return true;
1605 }
1606 return false;
1607 }
1608
1609 /* Implementation of region::get_bit_size vfunc for bit_range_region. */
1610
1611 bool
1612 bit_range_region::get_bit_size (bit_size_t *out) const
1613 {
1614 *out = m_bits.m_size_in_bits;
1615 return true;
1616 }
1617
1618 /* Implementation of region::get_byte_size_sval vfunc for bit_range_region. */
1619
1620 const svalue *
1621 bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
1622 {
1623 if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
1624 return mgr->get_or_create_unknown_svalue (size_type_node);
1625
1626 HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
1627 return mgr->get_or_create_int_cst (size_type_node, num_bytes);
1628 }
1629
1630 /* Implementation of region::get_relative_concrete_offset vfunc for
1631 bit_range_region. */
1632
1633 bool
1634 bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
1635 {
1636 *out = m_bits.get_start_bit_offset ();
1637 return true;
1638 }
1639
1640 /* class var_arg_region : public region. */
1641
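/* Implementation of region::dump_to_pp vfunc for var_arg_region.  */
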
1642 void
1643 var_arg_region::dump_to_pp (pretty_printer *pp, bool simple) const
1644 {
1645 if (simple)
1646 {
1647 pp_string (pp, "VAR_ARG_REG(");
1648 get_parent_region ()->dump_to_pp (pp, simple);
1649 pp_printf (pp, ", arg_idx: %d)", m_idx);
1650 }
1651 else
1652 {
1653 pp_string (pp, "var_arg_region(");
1654 get_parent_region ()->dump_to_pp (pp, simple);
1655 pp_printf (pp, ", arg_idx: %d)", m_idx);
1656 }
1657 }
1658
1659 /* Get the frame_region for this var_arg_region. */
1660
1661 const frame_region *
1662 var_arg_region::get_frame_region () const
1663 {
1664 gcc_assert (get_parent_region ());
1665 return as_a <const frame_region *> (get_parent_region ());
1666 }
1667
1668 /* class unknown_region : public region. */
1669
1670 /* Implementation of region::dump_to_pp vfunc for unknown_region. */
1671
1672 void
1673 unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
1674 {
1675 pp_string (pp, "UNKNOWN_REGION");
1676 }
1677
1678 } // namespace ana
1679
1680 #endif /* #if ENABLE_ANALYZER */