]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/analyzer/region.cc
analyzer: add caching to globals with initializers [PR110112]
[thirdparty/gcc.git] / gcc / analyzer / region.cc
1 /* Regions of memory.
2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "basic-block.h"
30 #include "gimple.h"
31 #include "gimple-iterator.h"
32 #include "diagnostic-core.h"
33 #include "graphviz.h"
34 #include "options.h"
35 #include "cgraph.h"
36 #include "tree-dfa.h"
37 #include "stringpool.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "fold-const.h"
41 #include "tree-pretty-print.h"
42 #include "diagnostic-color.h"
43 #include "diagnostic-metadata.h"
44 #include "bitmap.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "digraph.h"
52 #include "analyzer/supergraph.h"
53 #include "sbitmap.h"
54 #include "analyzer/call-string.h"
55 #include "analyzer/program-point.h"
56 #include "analyzer/store.h"
57 #include "analyzer/region.h"
58 #include "analyzer/region-model.h"
59 #include "analyzer/sm.h"
60 #include "analyzer/program-state.h"
61
62 #if ENABLE_ANALYZER
63
64 namespace ana {
65
66 /* class region and its various subclasses. */
67
68 /* class region. */
69
/* region's dtor: free the lazily-allocated offset cache
   (created by region::get_offset on first use; NULL until then).  */

region::~region ()
{
  delete m_cached_offset;
}
74
75 /* Compare REG1 and REG2 by id. */
76
77 int
78 region::cmp_ids (const region *reg1, const region *reg2)
79 {
80 return (long)reg1->get_id () - (long)reg2->get_id ();
81 }
82
83 /* Determine the base region for this region: when considering bindings
84 for this region, the base region is the ancestor which identifies
85 which cluster they should be partitioned into.
86 Regions within the same struct/union/array are in the same cluster.
87 Different decls are in different clusters. */
88
89 const region *
90 region::get_base_region () const
91 {
92 const region *iter = this;
93 while (iter)
94 {
95 switch (iter->get_kind ())
96 {
97 case RK_FIELD:
98 case RK_ELEMENT:
99 case RK_OFFSET:
100 case RK_SIZED:
101 case RK_BIT_RANGE:
102 iter = iter->get_parent_region ();
103 continue;
104 case RK_CAST:
105 iter = iter->dyn_cast_cast_region ()->get_original_region ();
106 continue;
107 default:
108 return iter;
109 }
110 }
111 return iter;
112 }
113
114 /* Return true if get_base_region() == this for this region. */
115
116 bool
117 region::base_region_p () const
118 {
119 switch (get_kind ())
120 {
121 /* Region kinds representing a descendent of a base region. */
122 case RK_FIELD:
123 case RK_ELEMENT:
124 case RK_OFFSET:
125 case RK_SIZED:
126 case RK_CAST:
127 case RK_BIT_RANGE:
128 return false;
129
130 default:
131 return true;
132 }
133 }
134
135 /* Return true if this region is ELDER or one of its descendents. */
136
137 bool
138 region::descendent_of_p (const region *elder) const
139 {
140 const region *iter = this;
141 while (iter)
142 {
143 if (iter == elder)
144 return true;
145 if (iter->get_kind () == RK_CAST)
146 iter = iter->dyn_cast_cast_region ()->get_original_region ();
147 else
148 iter = iter->get_parent_region ();
149 }
150 return false;
151 }
152
153 /* If this region is a frame_region, or a descendent of one, return it.
154 Otherwise return NULL. */
155
156 const frame_region *
157 region::maybe_get_frame_region () const
158 {
159 const region *iter = this;
160 while (iter)
161 {
162 if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
163 return frame_reg;
164 if (iter->get_kind () == RK_CAST)
165 iter = iter->dyn_cast_cast_region ()->get_original_region ();
166 else
167 iter = iter->get_parent_region ();
168 }
169 return NULL;
170 }
171
172 /* Get the memory space of this region. */
173
174 enum memory_space
175 region::get_memory_space () const
176 {
177 const region *iter = this;
178 while (iter)
179 {
180 switch (iter->get_kind ())
181 {
182 default:
183 break;
184 case RK_GLOBALS:
185 return MEMSPACE_GLOBALS;
186 case RK_CODE:
187 case RK_FUNCTION:
188 case RK_LABEL:
189 return MEMSPACE_CODE;
190 case RK_FRAME:
191 case RK_STACK:
192 case RK_ALLOCA:
193 return MEMSPACE_STACK;
194 case RK_HEAP:
195 case RK_HEAP_ALLOCATED:
196 return MEMSPACE_HEAP;
197 case RK_STRING:
198 return MEMSPACE_READONLY_DATA;
199 }
200 if (iter->get_kind () == RK_CAST)
201 iter = iter->dyn_cast_cast_region ()->get_original_region ();
202 else
203 iter = iter->get_parent_region ();
204 }
205 return MEMSPACE_UNKNOWN;
206 }
207
/* Subroutine for use by region_model_manager::get_or_create_initial_value.
   Return true if this region has an initial_svalue.
   Return false if attempting to use INIT_VAL(this_region) should give
   the "UNINITIALIZED" poison value.

   The decision is made based on the memory space of this region's base
   region, and (for the stack) on the kind of decl involved.  */

bool
region::can_have_initial_svalue_p () const
{
  const region *base_reg = get_base_region ();

  /* Check for memory spaces that are uninitialized by default.  */
  enum memory_space mem_space = base_reg->get_memory_space ();
  switch (mem_space)
    {
    default:
      gcc_unreachable ();
    case MEMSPACE_UNKNOWN:
    case MEMSPACE_CODE:
    case MEMSPACE_GLOBALS:
    case MEMSPACE_READONLY_DATA:
      /* Such regions have initial_svalues.  */
      return true;

    case MEMSPACE_HEAP:
      /* Heap allocations are uninitialized by default.  */
      return false;

    case MEMSPACE_STACK:
      if (tree decl = base_reg->maybe_get_decl ())
	{
	  /* See the assertion in frame_region::get_region_for_local for the
	     tree codes we need to handle here.  */
	  switch (TREE_CODE (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case PARM_DECL:
	      /* Parameters have initial values.  */
	      return true;

	    case VAR_DECL:
	    case RESULT_DECL:
	      /* Function locals don't have initial values.  */
	      return false;

	    case SSA_NAME:
	      {
		tree ssa_name = decl;
		/* SSA names that are the default defn of a PARM_DECL
		   have initial_svalues; other SSA names don't.  */
		if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
		    && SSA_NAME_VAR (ssa_name)
		    && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
		  return true;
		else
		  return false;
	      }
	    }
	}

      /* If we have an on-stack region that isn't associated with a decl
	 or SSA name, then we have VLA/alloca, which is uninitialized.  */
      return false;
    }
}
274
275 /* For regions within a global decl, get the svalue for the initial
276 value of this region when the program starts, caching the result. */
277
278 const svalue *
279 region::get_initial_value_at_main (region_model_manager *mgr) const
280 {
281 if (!m_cached_init_sval_at_main)
282 m_cached_init_sval_at_main = calc_initial_value_at_main (mgr);
283 return m_cached_init_sval_at_main;
284 }
285
/* Implementation of region::get_initial_value_at_main: compute the
   (uncached) svalue for the initial value of this region.  */

const svalue *
region::calc_initial_value_at_main (region_model_manager *mgr) const
{
  /* Callers only use this for regions rooted at a decl, so the base
     region must be a decl_region.  */
  const decl_region *base_reg = get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);

  /* Attempt to get the initializer value for base_reg.  */
  if (const svalue *base_reg_init
	= base_reg->get_svalue_for_initializer (mgr))
    {
      if (this == base_reg)
	return base_reg_init;
      else
	{
	  /* Get the value for REG within base_reg_init: bind the whole
	     initializer into a scratch cluster, then extract the part
	     corresponding to this subregion.  */
	  binding_cluster c (base_reg);
	  c.bind (mgr->get_store_manager (), base_reg, base_reg_init);
	  const svalue *sval
	    = c.get_any_binding (mgr->get_store_manager (), this);
	  if (sval)
	    {
	      /* Ensure the extracted value has this region's type.  */
	      if (get_type ())
		sval = mgr->get_or_create_cast (get_type (), sval);
	      return sval;
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return mgr->get_or_create_initial_value (this);
}
319
320 /* If this region is a decl_region, return the decl.
321 Otherwise return NULL. */
322
323 tree
324 region::maybe_get_decl () const
325 {
326 if (const decl_region *decl_reg = dyn_cast_decl_region ())
327 return decl_reg->get_decl ();
328 return NULL_TREE;
329 }
330
331 /* Get the region_offset for this region (calculating it on the
332 first call and caching it internally). */
333
334 region_offset
335 region::get_offset (region_model_manager *mgr) const
336 {
337 if(!m_cached_offset)
338 m_cached_offset = new region_offset (calc_offset (mgr));
339 return *m_cached_offset;
340 }
341
342 /* Base class implementation of region::get_byte_size vfunc.
343 If the size of this region (in bytes) is known statically, write it to *OUT
344 and return true.
345 Otherwise return false. */
346
347 bool
348 region::get_byte_size (byte_size_t *out) const
349 {
350 tree type = get_type ();
351
352 /* Bail out e.g. for heap-allocated regions. */
353 if (!type)
354 return false;
355
356 HOST_WIDE_INT bytes = int_size_in_bytes (type);
357 if (bytes == -1)
358 return false;
359 *out = bytes;
360 return true;
361 }
362
363 /* Base implementation of region::get_byte_size_sval vfunc. */
364
365 const svalue *
366 region::get_byte_size_sval (region_model_manager *mgr) const
367 {
368 tree type = get_type ();
369
370 /* Bail out e.g. for heap-allocated regions. */
371 if (!type)
372 return mgr->get_or_create_unknown_svalue (size_type_node);
373
374 HOST_WIDE_INT bytes = int_size_in_bytes (type);
375 if (bytes == -1)
376 return mgr->get_or_create_unknown_svalue (size_type_node);
377
378 tree byte_size = size_in_bytes (type);
379 if (TREE_TYPE (byte_size) != size_type_node)
380 byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
381 return mgr->get_or_create_constant_svalue (byte_size);
382 }
383
384 /* Attempt to get the size of TYPE in bits.
385 If successful, return true and write the size to *OUT.
386 Otherwise return false. */
387
388 bool
389 int_size_in_bits (const_tree type, bit_size_t *out)
390 {
391 if (INTEGRAL_TYPE_P (type))
392 {
393 *out = TYPE_PRECISION (type);
394 return true;
395 }
396
397 tree sz = TYPE_SIZE (type);
398 if (sz && tree_fits_uhwi_p (sz))
399 {
400 *out = TREE_INT_CST_LOW (sz);
401 return true;
402 }
403 else
404 return false;
405 }
406
407 /* If the size of this region (in bits) is known statically, write it to *OUT
408 and return true.
409 Otherwise return false. */
410
411 bool
412 region::get_bit_size (bit_size_t *out) const
413 {
414 tree type = get_type ();
415
416 /* Bail out e.g. for heap-allocated regions. */
417 if (!type)
418 return false;
419
420 return int_size_in_bits (type, out);
421 }
422
/* Get the field within RECORD_TYPE at BIT_OFFSET, or NULL_TREE if
   there is none (e.g. negative offset, or offset before the first
   field).  */

tree
get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
{
  gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
  if (bit_offset < 0)
    return NULL;

  /* Find the first field that has an offset > BIT_OFFSET,
     then return the one preceding it.
     Skip other trees within the chain, such as FUNCTION_DECLs.  */
  tree last_field = NULL_TREE;
  for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
       iter = DECL_CHAIN (iter))
    {
      if (TREE_CODE (iter) == FIELD_DECL)
	{
	  /* NOTE(review): int_bit_position's result is narrowed to
	     "int" here; presumably fields beyond INT_MAX bits don't
	     occur in practice - confirm for very large structs.  */
	  int iter_field_offset = int_bit_position (iter);
	  if (bit_offset < iter_field_offset)
	    return last_field;
	  last_field = iter;
	}
    }
  return last_field;
}
449
/* Populate *OUT with descendent regions of type TYPE that match
   RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region,
   recursing into arrays, structs and unions.  */

void
region::get_subregions_for_binding (region_model_manager *mgr,
				    bit_offset_t relative_bit_offset,
				    bit_size_t size_in_bits,
				    tree type,
				    auto_vec <const region *> *out) const
{
  /* Can't match anything without type information.  */
  if (get_type () == NULL_TREE || type == NULL_TREE)
    return;
  /* Base case: this region itself is a match.  */
  if (relative_bit_offset == 0
      && types_compatible_p (get_type (), type))
    {
      out->safe_push (this);
      return;
    }
  switch (TREE_CODE (get_type ()))
    {
    case ARRAY_TYPE:
      {
	tree element_type = TREE_TYPE (get_type ());
	HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
	if (hwi_byte_size > 0)
	  {
	    /* Locate the element containing the offset, then recurse
	       into it with the remainder of the offset.  */
	    HOST_WIDE_INT bits_per_element
	      = hwi_byte_size << LOG2_BITS_PER_UNIT;
	    HOST_WIDE_INT element_index
	      = (relative_bit_offset.to_shwi () / bits_per_element);
	    tree element_index_cst
	      = build_int_cst (integer_type_node, element_index);
	    HOST_WIDE_INT inner_bit_offset
	      = relative_bit_offset.to_shwi () % bits_per_element;
	    const region *subregion = mgr->get_element_region
	      (this, element_type,
	       mgr->get_or_create_constant_svalue (element_index_cst));
	    subregion->get_subregions_for_binding (mgr, inner_bit_offset,
						   size_in_bits, type, out);
	  }
      }
      break;
    case RECORD_TYPE:
      {
	/* The bit offset might be *within* one of the fields (such as
	   with nested structs).
	   So we want to find the enclosing field, adjust the offset,
	   and repeat.  */
	if (tree field = get_field_at_bit_offset (get_type (),
						  relative_bit_offset))
	  {
	    int field_bit_offset = int_bit_position (field);
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding
	      (mgr, relative_bit_offset - field_bit_offset,
	       size_in_bits, type, out);
	  }
      }
      break;
    case UNION_TYPE:
      {
	/* All members of a union overlap, so recurse into every
	   field at the same offset.  */
	for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
	     field = DECL_CHAIN (field))
	  {
	    if (TREE_CODE (field) != FIELD_DECL)
	      continue;
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding (mgr,
						   relative_bit_offset,
						   size_in_bits,
						   type,
						   out);
	  }
      }
      break;
    default:
      /* Do nothing.  */
      break;
    }
}
530
/* Walk from this region up to the base region within its cluster, calculating
   the offset relative to the base region, either as an offset in bits,
   or a symbolic offset.

   We start accumulating concretely (in accum_bit_offset); as soon as one
   step has no concrete offset we switch to symbolic accumulation
   (accum_byte_sval) for the rest of the walk.  */

region_offset
region::calc_offset (region_model_manager *mgr) const
{
  const region *iter_region = this;
  bit_offset_t accum_bit_offset = 0;
  const svalue *accum_byte_sval = NULL;

  while (iter_region)
    {
      switch (iter_region->get_kind ())
	{
	case RK_FIELD:
	case RK_ELEMENT:
	case RK_OFFSET:
	case RK_BIT_RANGE:
	  if (accum_byte_sval)
	    {
	      /* Already in symbolic mode: add this step's symbolic
		 offset and move to the parent.  */
	      const svalue *sval
		= iter_region->get_relative_symbolic_offset (mgr);
	      accum_byte_sval
		= mgr->get_or_create_binop (sval->get_type (), PLUS_EXPR,
					    accum_byte_sval, sval);
	      iter_region = iter_region->get_parent_region ();
	    }
	  else
	    {
	      bit_offset_t rel_bit_offset;
	      if (iter_region->get_relative_concrete_offset (&rel_bit_offset))
		{
		  accum_bit_offset += rel_bit_offset;
		  iter_region = iter_region->get_parent_region ();
		}
	      else
		{
		  /* If the iter_region is not concrete anymore, convert the
		     accumulated bits to a svalue in bytes and revisit the
		     iter_region collecting the symbolic value.
		     Note that iter_region is deliberately NOT advanced
		     here: the next loop iteration re-handles it via the
		     symbolic branch above.  */
		  byte_offset_t byte_offset = accum_bit_offset / BITS_PER_UNIT;
		  tree offset_tree = wide_int_to_tree (integer_type_node,
						       byte_offset);
		  accum_byte_sval
		    = mgr->get_or_create_constant_svalue (offset_tree);
		}
	    }
	  continue;
	case RK_SIZED:
	  /* Sized regions are at offset zero within their parent.  */
	  iter_region = iter_region->get_parent_region ();
	  continue;

	case RK_CAST:
	  {
	    /* Look through casts to the underlying region.  */
	    const cast_region *cast_reg
	      = as_a <const cast_region *> (iter_region);
	    iter_region = cast_reg->get_original_region ();
	  }
	  continue;

	default:
	  /* Reached the base region of the cluster.  */
	  return accum_byte_sval
		  ? region_offset::make_symbolic (iter_region,
						  accum_byte_sval)
		  : region_offset::make_concrete (iter_region,
						  accum_bit_offset);
	}
    }

  return accum_byte_sval ? region_offset::make_symbolic (iter_region,
							 accum_byte_sval)
			 : region_offset::make_concrete (iter_region,
							 accum_bit_offset);
}
606
/* Base implementation of region::get_relative_concrete_offset vfunc:
   by default a region has no known concrete offset relative to its
   parent, so report failure (leaving the out-param untouched).  */

bool
region::get_relative_concrete_offset (bit_offset_t *) const
{
  return false;
}
614
/* Base implementation of region::get_relative_symbolic_offset vfunc:
   by default nothing is known about the offset relative to the parent,
   so return an "unknown" svalue of integer type.  */

const svalue *
region::get_relative_symbolic_offset (region_model_manager *mgr) const
{
  return mgr->get_or_create_unknown_svalue (integer_type_node);
}
622
623 /* Attempt to get the position and size of this region expressed as a
624 concrete range of bytes relative to its parent.
625 If successful, return true and write to *OUT.
626 Otherwise return false. */
627
628 bool
629 region::get_relative_concrete_byte_range (byte_range *out) const
630 {
631 /* We must have a concrete offset relative to the parent. */
632 bit_offset_t rel_bit_offset;
633 if (!get_relative_concrete_offset (&rel_bit_offset))
634 return false;
635 /* ...which must be a whole number of bytes. */
636 if (rel_bit_offset % BITS_PER_UNIT != 0)
637 return false;
638 byte_offset_t start_byte_offset = rel_bit_offset / BITS_PER_UNIT;
639
640 /* We must have a concrete size, which must be a whole number
641 of bytes. */
642 byte_size_t num_bytes;
643 if (!get_byte_size (&num_bytes))
644 return false;
645
646 /* Success. */
647 *out = byte_range (start_byte_offset, num_bytes);
648 return true;
649 }
650
/* Dump a description of this region to stderr, for use from the
   debugger.  SIMPLE selects the terse vs verbose form of
   dump_to_pp.  */

DEBUG_FUNCTION void
region::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Inherit colorization settings from the global diagnostic context.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
  pp_flush (&pp);
}
664
665 /* Return a new json::string describing the region. */
666
667 json::value *
668 region::to_json () const
669 {
670 label_text desc = get_desc (true);
671 json::value *reg_js = new json::string (desc.get ());
672 return reg_js;
673 }
674
/* Generate a description of this region as a label_text
   (which owns the underlying buffer).
   SIMPLE selects the terse vs verbose form of dump_to_pp.  */

DEBUG_FUNCTION label_text
region::get_desc (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_to_pp (&pp, simple);
  return label_text::take (xstrdup (pp_formatted_text (&pp)));
}
685
686 /* Base implementation of region::accept vfunc.
687 Subclass implementations should chain up to this. */
688
689 void
690 region::accept (visitor *v) const
691 {
692 v->visit_region (this);
693 if (m_parent)
694 m_parent->accept (v);
695 }
696
697 /* Return true if this is a symbolic region for deferencing an
698 unknown ptr.
699 We shouldn't attempt to bind values for this region (but
700 can unbind values for other regions). */
701
702 bool
703 region::symbolic_for_unknown_ptr_p () const
704 {
705 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
706 if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
707 return true;
708 return false;
709 }
710
/* Return true if this is a symbolic region (kind RK_SYMBOLIC),
   i.e. the region for dereferencing a pointer svalue.  */

bool
region::symbolic_p () const
{
  return get_kind () == RK_SYMBOLIC;
}
718
719 /* Return true if this region is known to be zero bits in size. */
720
721 bool
722 region::empty_p () const
723 {
724 bit_size_t num_bits;
725 if (get_bit_size (&num_bits))
726 if (num_bits == 0)
727 return true;
728 return false;
729 }
730
731 /* Return true if this is a region for a decl with name DECL_NAME.
732 Intended for use when debugging (for assertions and conditional
733 breakpoints). */
734
735 DEBUG_FUNCTION bool
736 region::is_named_decl_p (const char *decl_name) const
737 {
738 if (tree decl = maybe_get_decl ())
739 if (DECL_NAME (decl)
740 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
741 return true;
742 return false;
743 }
744
/* region's ctor.
   C is the complexity measure, ID the id used for stable ordering
   (see cmp_ids), PARENT the enclosing region (if any), and TYPE the
   type of values within this region (NULL_TREE if untyped).
   The lazily-computed caches start out empty.  */

region::region (complexity c, unsigned id, const region *parent, tree type)
: m_complexity (c), m_id (id), m_parent (parent), m_type (type),
  m_cached_offset (NULL), m_cached_init_sval_at_main (NULL)
{
  gcc_assert (type == NULL_TREE || TYPE_P (type));
}
753
754 /* Comparator for use by vec<const region *>::qsort,
755 using their IDs to order them. */
756
757 int
758 region::cmp_ptr_ptr (const void *p1, const void *p2)
759 {
760 const region * const *reg1 = (const region * const *)p1;
761 const region * const *reg2 = (const region * const *)p2;
762
763 return cmp_ids (*reg1, *reg2);
764 }
765
766 /* Determine if a pointer to this region must be non-NULL.
767
768 Generally, pointers to regions must be non-NULL, but pointers
769 to symbolic_regions might, in fact, be NULL.
770
771 This allows us to simulate functions like malloc and calloc with:
772 - only one "outcome" from each statement,
773 - the idea that the pointer is on the heap if non-NULL
774 - the possibility that the pointer could be NULL
775 - the idea that successive values returned from malloc are non-equal
776 - to be able to zero-fill for calloc. */
777
778 bool
779 region::non_null_p () const
780 {
781 switch (get_kind ())
782 {
783 default:
784 return true;
785 case RK_SYMBOLIC:
786 /* Are we within a symbolic_region? If so, it could be NULL, and we
787 have to fall back on the constraints. */
788 return false;
789 case RK_HEAP_ALLOCATED:
790 return false;
791 }
792 }
793
794 /* Return true iff this region is defined in terms of SVAL. */
795
796 bool
797 region::involves_p (const svalue *sval) const
798 {
799 if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
800 {
801 if (symbolic_reg->get_pointer ()->involves_p (sval))
802 return true;
803 }
804
805 return false;
806 }
807
/* Comparator for trees to impose a deterministic ordering on
   T1 and T2 (e.g. independent of pointer values, for reproducible
   dumps).  Both must be non-NULL and, past the first test, of the
   same tree code.  */

static int
tree_cmp (const_tree t1, const_tree t2)
{
  gcc_assert (t1);
  gcc_assert (t2);

  /* Test tree codes first.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return TREE_CODE (t1) - TREE_CODE (t2);

  /* From this point on, we know T1 and T2 have the same tree code.  */

  if (DECL_P (t1))
    {
      if (DECL_NAME (t1) && DECL_NAME (t2))
	return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
		       IDENTIFIER_POINTER (DECL_NAME (t2)));
      else
	{
	  /* Named decls sort before unnamed ones; two unnamed decls
	     are ordered by UID.  */
	  if (DECL_NAME (t1))
	    return -1;
	  else if (DECL_NAME (t2))
	    return 1;
	  else
	    return DECL_UID (t1) - DECL_UID (t2);
	}
    }

  switch (TREE_CODE (t1))
    {
    case SSA_NAME:
      {
	if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
	  {
	    /* Order by underlying var first, then by version.  */
	    int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
	    if (var_cmp)
	      return var_cmp;
	    return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
	else
	  {
	    if (SSA_NAME_VAR (t1))
	      return -1;
	    else if (SSA_NAME_VAR (t2))
	      return 1;
	    else
	      return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
      }
      /* Not reached: both branches above return.  */
      break;

    case INTEGER_CST:
      return tree_int_cst_compare (t1, t2);

    case REAL_CST:
      {
	const real_value *rv1 = TREE_REAL_CST_PTR (t1);
	const real_value *rv2 = TREE_REAL_CST_PTR (t2);
	if (real_compare (UNORDERED_EXPR, rv1, rv2))
	  {
	    /* Impose an arbitrary order on NaNs relative to other NaNs
	       and to non-NaNs.  */
	    if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
	      return cmp_isnan;
	    if (int cmp_issignaling_nan
		  = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
	      return cmp_issignaling_nan;
	    return real_isneg (rv1) - real_isneg (rv2);
	  }
	if (real_compare (LT_EXPR, rv1, rv2))
	  return -1;
	if (real_compare (GT_EXPR, rv1, rv2))
	  return 1;
	return 0;
      }

    case STRING_CST:
      return strcmp (TREE_STRING_POINTER (t1),
		     TREE_STRING_POINTER (t2));

    default:
      /* Any other tree code is unexpected here.  */
      gcc_unreachable ();
      break;
    }

  gcc_unreachable ();

  return 0;
}
900
901 /* qsort comparator for trees to impose a deterministic ordering on
902 P1 and P2. */
903
904 int
905 tree_cmp (const void *p1, const void *p2)
906 {
907 const_tree t1 = *(const_tree const *)p1;
908 const_tree t2 = *(const_tree const *)p2;
909
910 return tree_cmp (t1, t2);
911 }
912
/* class frame_region : public space_region.  */

/* frame_region's dtor: the frame owns the decl_regions for its locals
   (created lazily by get_region_for_local and stored in m_locals),
   so delete them here.  */

frame_region::~frame_region ()
{
  for (map_t::iterator iter = m_locals.begin ();
       iter != m_locals.end ();
       ++iter)
    delete (*iter).second;
}
922
923 void
924 frame_region::accept (visitor *v) const
925 {
926 region::accept (v);
927 if (m_calling_frame)
928 m_calling_frame->accept (v);
929 }
930
931 /* Implementation of region::dump_to_pp vfunc for frame_region. */
932
933 void
934 frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
935 {
936 if (simple)
937 pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ());
938 else
939 pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
940 function_name (m_fun), m_index, get_stack_depth ());
941 }
942
/* Get the decl_region for frame-local EXPR (a PARM_DECL, local
   VAR_DECL, RESULT_DECL, or SSA_NAME), creating it on first use and
   caching it in m_locals.  CTXT is used (in checked builds) to verify
   that an SSA_NAME without an underlying var belongs to this frame's
   function.  */

const decl_region *
frame_region::get_region_for_local (region_model_manager *mgr,
				    tree expr,
				    const region_model_context *ctxt) const
{
  if (CHECKING_P)
    {
      /* Verify that EXPR is a local or SSA name, and that it's for the
	 correct function for this stack frame.  */
      gcc_assert (TREE_CODE (expr) == PARM_DECL
		  || TREE_CODE (expr) == VAR_DECL
		  || TREE_CODE (expr) == SSA_NAME
		  || TREE_CODE (expr) == RESULT_DECL);
      switch (TREE_CODE (expr))
	{
	default:
	  gcc_unreachable ();
	case VAR_DECL:
	  gcc_assert (!is_global_var (expr));
	  /* Fall through.  */
	case PARM_DECL:
	case RESULT_DECL:
	  gcc_assert (DECL_CONTEXT (expr) == m_fun->decl);
	  break;
	case SSA_NAME:
	  {
	    if (tree var = SSA_NAME_VAR (expr))
	      {
		if (DECL_P (var))
		  gcc_assert (DECL_CONTEXT (var) == m_fun->decl);
	      }
	    else if (ctxt)
	      /* No underlying var: check the defining stmt's function
		 via the supergraph, when one is available.  */
	      if (const extrinsic_state *ext_state = ctxt->get_ext_state ())
		if (const supergraph *sg
		      = ext_state->get_engine ()->get_supergraph ())
		  {
		    const gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
		    const supernode *snode
		      = sg->get_supernode_for_stmt (def_stmt);
		    gcc_assert (snode->get_function () == m_fun);
		  }
	  }
	  break;
	}
    }

  /* Ideally we'd use mutable here.  */
  map_t &mutable_locals = const_cast <map_t &> (m_locals);

  /* Return the cached region, or create, cache and return a new one.  */
  if (decl_region **slot = mutable_locals.get (expr))
    return *slot;
  decl_region *reg
    = new decl_region (mgr->alloc_region_id (), this, expr);
  mutable_locals.put (expr, reg);
  return reg;
}
999
1000 /* class globals_region : public space_region. */
1001
1002 /* Implementation of region::dump_to_pp vfunc for globals_region. */
1003
1004 void
1005 globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
1006 {
1007 if (simple)
1008 pp_string (pp, "::");
1009 else
1010 pp_string (pp, "globals");
1011 }
1012
1013 /* class code_region : public map_region. */
1014
1015 /* Implementation of region::dump_to_pp vfunc for code_region. */
1016
1017 void
1018 code_region::dump_to_pp (pretty_printer *pp, bool simple) const
1019 {
1020 if (simple)
1021 pp_string (pp, "code region");
1022 else
1023 pp_string (pp, "code_region()");
1024 }
1025
1026 /* class function_region : public region. */
1027
1028 /* Implementation of region::dump_to_pp vfunc for function_region. */
1029
1030 void
1031 function_region::dump_to_pp (pretty_printer *pp, bool simple) const
1032 {
1033 if (simple)
1034 {
1035 dump_quoted_tree (pp, m_fndecl);
1036 }
1037 else
1038 {
1039 pp_string (pp, "function_region(");
1040 dump_quoted_tree (pp, m_fndecl);
1041 pp_string (pp, ")");
1042 }
1043 }
1044
1045 /* class label_region : public region. */
1046
1047 /* Implementation of region::dump_to_pp vfunc for label_region. */
1048
1049 void
1050 label_region::dump_to_pp (pretty_printer *pp, bool simple) const
1051 {
1052 if (simple)
1053 {
1054 dump_quoted_tree (pp, m_label);
1055 }
1056 else
1057 {
1058 pp_string (pp, "label_region(");
1059 dump_quoted_tree (pp, m_label);
1060 pp_string (pp, ")");
1061 }
1062 }
1063
1064 /* class stack_region : public region. */
1065
1066 /* Implementation of region::dump_to_pp vfunc for stack_region. */
1067
1068 void
1069 stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
1070 {
1071 if (simple)
1072 pp_string (pp, "stack region");
1073 else
1074 pp_string (pp, "stack_region()");
1075 }
1076
1077 /* class heap_region : public region. */
1078
1079 /* Implementation of region::dump_to_pp vfunc for heap_region. */
1080
1081 void
1082 heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
1083 {
1084 if (simple)
1085 pp_string (pp, "heap region");
1086 else
1087 pp_string (pp, "heap_region()");
1088 }
1089
/* class root_region : public region.  */

/* root_region's ctor: the root has no parent region and no type.  */

root_region::root_region (unsigned id)
: region (complexity (1, 1), id, NULL, NULL_TREE)
{
}
1098
1099 /* Implementation of region::dump_to_pp vfunc for root_region. */
1100
1101 void
1102 root_region::dump_to_pp (pretty_printer *pp, bool simple) const
1103 {
1104 if (simple)
1105 pp_string (pp, "root region");
1106 else
1107 pp_string (pp, "root_region()");
1108 }
1109
1110 /* class thread_local_region : public space_region. */
1111
1112 void
1113 thread_local_region::dump_to_pp (pretty_printer *pp, bool simple) const
1114 {
1115 if (simple)
1116 pp_string (pp, "thread_local_region");
1117 else
1118 pp_string (pp, "thread_local_region()");
1119 }
1120
/* class symbolic_region : public region.
   (The ctor's initializer list chains to region's ctor, so region is
   the direct base; the old comment said "map_region".)  */

/* symbolic_region's ctor: the region referenced by pointer SVAL_PTR.
   The region's type is the pointed-to type of SVAL_PTR's type, when
   that is known.  */

symbolic_region::symbolic_region (unsigned id, region *parent,
				  const svalue *sval_ptr)
: region (complexity::from_pair (parent, sval_ptr), id, parent,
	  (sval_ptr->get_type ()
	   ? TREE_TYPE (sval_ptr->get_type ())
	   : NULL_TREE)),
  m_sval_ptr (sval_ptr)
{
}
1134
/* Implementation of region::accept vfunc for symbolic_region:
   chain up to the base class (which visits this region and its
   parents), then visit the pointer svalue.  */

void
symbolic_region::accept (visitor *v) const
{
  region::accept (v);
  m_sval_ptr->accept (v);
}
1143
1144 /* Implementation of region::dump_to_pp vfunc for symbolic_region. */
1145
1146 void
1147 symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
1148 {
1149 if (simple)
1150 {
1151 pp_string (pp, "(*");
1152 m_sval_ptr->dump_to_pp (pp, simple);
1153 pp_string (pp, ")");
1154 }
1155 else
1156 {
1157 pp_string (pp, "symbolic_region(");
1158 get_parent_region ()->dump_to_pp (pp, simple);
1159 if (get_type ())
1160 {
1161 pp_string (pp, ", ");
1162 print_quoted_type (pp, get_type ());
1163 }
1164 pp_string (pp, ", ");
1165 m_sval_ptr->dump_to_pp (pp, simple);
1166 pp_string (pp, ")");
1167 }
1168 }
1169
1170 /* class decl_region : public region. */
1171
1172 /* Implementation of region::dump_to_pp vfunc for decl_region. */
1173
1174 void
1175 decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
1176 {
1177 if (simple)
1178 pp_printf (pp, "%E", m_decl);
1179 else
1180 {
1181 pp_string (pp, "decl_region(");
1182 get_parent_region ()->dump_to_pp (pp, simple);
1183 pp_string (pp, ", ");
1184 print_quoted_type (pp, get_type ());
1185 pp_printf (pp, ", %qE)", m_decl);
1186 }
1187 }
1188
1189 /* Get the stack depth for the frame containing this decl, or 0
1190 for a global. */
1191
1192 int
1193 decl_region::get_stack_depth () const
1194 {
1195 if (get_parent_region () == NULL)
1196 return 0;
1197 if (const frame_region *frame_reg
1198 = get_parent_region ()->dyn_cast_frame_region ())
1199 return frame_reg->get_stack_depth ();
1200 return 0;
1201 }
1202
1203 /* If the underlying decl is in the global constant pool,
1204 return an svalue representing the constant value.
1205 Otherwise return NULL. */
1206
1207 const svalue *
1208 decl_region::maybe_get_constant_value (region_model_manager *mgr) const
1209 {
1210 if (VAR_P (m_decl)
1211 && DECL_IN_CONSTANT_POOL (m_decl)
1212 && DECL_INITIAL (m_decl)
1213 && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
1214 return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
1215 return NULL;
1216 }
1217
1218 /* Implementation of decl_region::get_svalue_for_constructor
1219 for when the cached value hasn't yet been calculated. */
1220
1221 const svalue *
1222 decl_region::calc_svalue_for_constructor (tree ctor,
1223 region_model_manager *mgr) const
1224 {
1225 /* Create a binding map, applying ctor to it, using this
1226 decl_region as the base region when building child regions
1227 for offset calculations. */
1228 binding_map map;
1229 if (!map.apply_ctor_to_region (this, ctor, mgr))
1230 return mgr->get_or_create_unknown_svalue (get_type ());
1231
1232 /* Return a compound svalue for the map we built. */
1233 return mgr->get_or_create_compound_svalue (get_type (), map);
1234 }
1235
1236 /* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl. */
1237
1238 const svalue *
1239 decl_region::get_svalue_for_constructor (tree ctor,
1240 region_model_manager *mgr) const
1241 {
1242 gcc_assert (!TREE_CLOBBER_P (ctor));
1243 gcc_assert (ctor == DECL_INITIAL (m_decl));
1244
1245 if (!m_ctor_svalue)
1246 m_ctor_svalue = calc_svalue_for_constructor (ctor, mgr);
1247
1248 return m_ctor_svalue;
1249 }
1250
/* For use on decl_regions for global variables.

   Get an svalue for the initial value of this region at entry to
   "main" (either based on DECL_INITIAL, or implicit initialization to
   zero).

   Return NULL if there is a problem.  */

const svalue *
decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
{
  tree init = DECL_INITIAL (m_decl);
  if (!init)
    {
      /* If we have an "extern" decl then there may be an initializer in
	 another TU.  */
      if (DECL_EXTERNAL (m_decl))
	return NULL;

      /* A zero-sized region has no content to initialize.  */
      if (empty_p ())
	return NULL;

      /* Implicit initialization to zero; use a compound_svalue for it.
	 Doing so requires that we have a concrete binding for this region,
	 which can fail if we have a region with unknown size
	 (e.g. "extern const char arr[];").  */
      const binding_key *binding
	= binding_key::make (mgr->get_store_manager (), this);
      if (binding->symbolic_p ())
	return NULL;

      /* If we don't care about tracking the content of this region, then
	 it's unused, and the value doesn't matter.  */
      if (!tracked_p ())
	return NULL;

      /* Build a cluster that zero-fills this region, and return the
	 resulting bindings as a compound svalue.  */
      binding_cluster c (this);
      c.zero_fill_region (mgr->get_store_manager (), this);
      return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
						 c.get_map ());
    }

  /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
     values (to avoid writing out an extra section).  */
  if (init == error_mark_node)
    return NULL;

  if (TREE_CODE (init) == CONSTRUCTOR)
    return get_svalue_for_constructor (init, mgr);

  /* Reuse the get_rvalue logic from region_model.  */
  region_model m (mgr);
  return m.get_rvalue (path_var (init, 0), NULL);
}
1305
/* Subroutine of symnode_requires_tracking_p; return true if REF
   might imply that we should be tracking the value of its decl.
   Only certain address-taken uses (pointer passed to an undefined
   function, or to an asm with no outputs/clobbers) allow us to skip
   tracking; everything else conservatively returns true.  */

static bool
ipa_ref_requires_tracking (ipa_ref *ref)
{
  /* If we have a load/store/alias of the symbol, then we'll track
     the decl's value.  */
  if (ref->use != IPA_REF_ADDR)
    return true;

  /* No statement to inspect: be conservative.  */
  if (ref->stmt == NULL)
    return true;

  switch (ref->stmt->code)
    {
    default:
      return true;
    case GIMPLE_CALL:
      {
	/* Only calls from actual functions can be resolved to an edge.  */
	cgraph_node *caller_cnode = dyn_cast <cgraph_node *> (ref->referring);
	if (caller_cnode == NULL)
	  return true;
	cgraph_edge *edge = caller_cnode->get_edge (ref->stmt);
	if (!edge)
	  return true;
	if (edge->callee == NULL)
	  return true; /* e.g. call through function ptr.  */
	/* A callee with a body could dereference the pointer.  */
	if (edge->callee->definition)
	  return true;
	/* If we get here, then this ref is a pointer passed to
	   a function we don't have the definition for.  */
	return false;
      }
      break;
    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (ref->stmt);
	/* Outputs or clobbers mean the asm could write through it.  */
	if (gimple_asm_noutputs (asm_stmt) > 0)
	  return true;
	if (gimple_asm_nclobbers (asm_stmt) > 0)
	  return true;
	/* If we get here, then this ref is the decl being passed
	   by pointer to asm with no outputs.  */
	return false;
      }
      break;
    }
}
1355
1356 /* Determine if the decl for SYMNODE should have binding_clusters
1357 in our state objects; return false to optimize away tracking
1358 certain decls in our state objects, as an optimization. */
1359
1360 static bool
1361 symnode_requires_tracking_p (symtab_node *symnode)
1362 {
1363 gcc_assert (symnode);
1364 if (symnode->externally_visible)
1365 return true;
1366 tree context_fndecl = DECL_CONTEXT (symnode->decl);
1367 if (context_fndecl == NULL)
1368 return true;
1369 if (TREE_CODE (context_fndecl) != FUNCTION_DECL)
1370 return true;
1371 for (auto ref : symnode->ref_list.referring)
1372 if (ipa_ref_requires_tracking (ref))
1373 return true;
1374
1375 /* If we get here, then we don't have uses of this decl that require
1376 tracking; we never read from it or write to it explicitly. */
1377 return false;
1378 }
1379
1380 /* Subroutine of decl_region ctor: determine whether this decl_region
1381 can have binding_clusters; return false to optimize away tracking
1382 of certain decls in our state objects, as an optimization. */
1383
1384 bool
1385 decl_region::calc_tracked_p (tree decl)
1386 {
1387 /* Precondition of symtab_node::get. */
1388 if (TREE_CODE (decl) == VAR_DECL
1389 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) || in_lto_p))
1390 if (symtab_node *symnode = symtab_node::get (decl))
1391 return symnode_requires_tracking_p (symnode);
1392 return true;
1393 }
1394
1395 /* class field_region : public region. */
1396
1397 /* Implementation of region::dump_to_pp vfunc for field_region. */
1398
1399 void
1400 field_region::dump_to_pp (pretty_printer *pp, bool simple) const
1401 {
1402 if (simple)
1403 {
1404 get_parent_region ()->dump_to_pp (pp, simple);
1405 pp_string (pp, ".");
1406 pp_printf (pp, "%E", m_field);
1407 }
1408 else
1409 {
1410 pp_string (pp, "field_region(");
1411 get_parent_region ()->dump_to_pp (pp, simple);
1412 pp_string (pp, ", ");
1413 print_quoted_type (pp, get_type ());
1414 pp_printf (pp, ", %qE)", m_field);
1415 }
1416 }
1417
1418 /* Implementation of region::get_relative_concrete_offset vfunc
1419 for field_region. */
1420
1421 bool
1422 field_region::get_relative_concrete_offset (bit_offset_t *out) const
1423 {
1424 /* Compare with e.g. gimple-fold.cc's
1425 fold_nonarray_ctor_reference. */
1426 tree byte_offset = DECL_FIELD_OFFSET (m_field);
1427 if (TREE_CODE (byte_offset) != INTEGER_CST)
1428 return false;
1429 tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
1430 /* Compute bit offset of the field. */
1431 offset_int bitoffset
1432 = (wi::to_offset (field_offset)
1433 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
1434 *out = bitoffset;
1435 return true;
1436 }
1437
1438
1439 /* Implementation of region::get_relative_symbolic_offset vfunc
1440 for field_region.
1441 If known, the returned svalue is equal to the offset converted to bytes and
1442 rounded off. */
1443
1444 const svalue *
1445 field_region::get_relative_symbolic_offset (region_model_manager *mgr) const
1446 {
1447 bit_offset_t out;
1448 if (get_relative_concrete_offset (&out))
1449 {
1450 tree cst_tree
1451 = wide_int_to_tree (integer_type_node, out / BITS_PER_UNIT);
1452 return mgr->get_or_create_constant_svalue (cst_tree);
1453 }
1454 return mgr->get_or_create_unknown_svalue (integer_type_node);
1455 }
1456
1457 /* class element_region : public region. */
1458
/* Implementation of region::accept vfunc for element_region.
   Visit the region itself, then the index svalue.  */

void
element_region::accept (visitor *v) const
{
  region::accept (v);
  m_index->accept (v);
}
1467
1468 /* Implementation of region::dump_to_pp vfunc for element_region. */
1469
1470 void
1471 element_region::dump_to_pp (pretty_printer *pp, bool simple) const
1472 {
1473 if (simple)
1474 {
1475 //pp_string (pp, "(");
1476 get_parent_region ()->dump_to_pp (pp, simple);
1477 pp_string (pp, "[");
1478 m_index->dump_to_pp (pp, simple);
1479 pp_string (pp, "]");
1480 //pp_string (pp, ")");
1481 }
1482 else
1483 {
1484 pp_string (pp, "element_region(");
1485 get_parent_region ()->dump_to_pp (pp, simple);
1486 pp_string (pp, ", ");
1487 print_quoted_type (pp, get_type ());
1488 pp_string (pp, ", ");
1489 m_index->dump_to_pp (pp, simple);
1490 pp_printf (pp, ")");
1491 }
1492 }
1493
1494 /* Implementation of region::get_relative_concrete_offset vfunc
1495 for element_region. */
1496
1497 bool
1498 element_region::get_relative_concrete_offset (bit_offset_t *out) const
1499 {
1500 if (tree idx_cst = m_index->maybe_get_constant ())
1501 {
1502 gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);
1503
1504 tree elem_type = get_type ();
1505 offset_int element_idx = wi::to_offset (idx_cst);
1506
1507 /* First, use int_size_in_bytes, to reject the case where we
1508 have an incomplete type, or a non-constant value. */
1509 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1510 if (hwi_byte_size > 0)
1511 {
1512 offset_int element_bit_size
1513 = hwi_byte_size << LOG2_BITS_PER_UNIT;
1514 offset_int element_bit_offset
1515 = element_idx * element_bit_size;
1516 *out = element_bit_offset;
1517 return true;
1518 }
1519 }
1520 return false;
1521 }
1522
1523 /* Implementation of region::get_relative_symbolic_offset vfunc
1524 for element_region. */
1525
1526 const svalue *
1527 element_region::get_relative_symbolic_offset (region_model_manager *mgr) const
1528 {
1529 tree elem_type = get_type ();
1530
1531 /* First, use int_size_in_bytes, to reject the case where we
1532 have an incomplete type, or a non-constant value. */
1533 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1534 if (hwi_byte_size > 0)
1535 {
1536 tree byte_size_tree = wide_int_to_tree (integer_type_node,
1537 hwi_byte_size);
1538 const svalue *byte_size_sval
1539 = mgr->get_or_create_constant_svalue (byte_size_tree);
1540 return mgr->get_or_create_binop (integer_type_node, MULT_EXPR,
1541 m_index, byte_size_sval);
1542 }
1543 return mgr->get_or_create_unknown_svalue (integer_type_node);
1544 }
1545
1546 /* class offset_region : public region. */
1547
/* Implementation of region::accept vfunc for offset_region.
   Visit the region itself, then the byte-offset svalue.  */

void
offset_region::accept (visitor *v) const
{
  region::accept (v);
  m_byte_offset->accept (v);
}
1556
1557 /* Implementation of region::dump_to_pp vfunc for offset_region. */
1558
1559 void
1560 offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
1561 {
1562 if (simple)
1563 {
1564 //pp_string (pp, "(");
1565 get_parent_region ()->dump_to_pp (pp, simple);
1566 pp_string (pp, "+");
1567 m_byte_offset->dump_to_pp (pp, simple);
1568 //pp_string (pp, ")");
1569 }
1570 else
1571 {
1572 pp_string (pp, "offset_region(");
1573 get_parent_region ()->dump_to_pp (pp, simple);
1574 pp_string (pp, ", ");
1575 print_quoted_type (pp, get_type ());
1576 pp_string (pp, ", ");
1577 m_byte_offset->dump_to_pp (pp, simple);
1578 pp_printf (pp, ")");
1579 }
1580 }
1581
1582 /* Implementation of region::get_relative_concrete_offset vfunc
1583 for offset_region. */
1584
1585 bool
1586 offset_region::get_relative_concrete_offset (bit_offset_t *out) const
1587 {
1588 if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
1589 {
1590 gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1591 /* Use a signed value for the byte offset, to handle
1592 negative offsets. */
1593 HOST_WIDE_INT byte_offset
1594 = wi::to_offset (byte_offset_cst).to_shwi ();
1595 HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
1596 *out = bit_offset;
1597 return true;
1598 }
1599 return false;
1600 }
1601
/* Implementation of region::get_relative_symbolic_offset vfunc
   for offset_region: the offset is already an svalue in bytes,
   so return it directly (MGR is unneeded).  */

const svalue *
offset_region::get_relative_symbolic_offset (region_model_manager *mgr
					      ATTRIBUTE_UNUSED) const
{
  return get_byte_offset ();
}
1611
1612 /* Implementation of region::get_byte_size_sval vfunc for offset_region. */
1613
1614 const svalue *
1615 offset_region::get_byte_size_sval (region_model_manager *mgr) const
1616 {
1617 tree offset_cst = get_byte_offset ()->maybe_get_constant ();
1618 byte_size_t byte_size;
1619 /* If the offset points in the middle of the region,
1620 return the remaining bytes. */
1621 if (get_byte_size (&byte_size) && offset_cst)
1622 {
1623 byte_size_t offset = wi::to_offset (offset_cst);
1624 byte_range r (0, byte_size);
1625 if (r.contains_p (offset))
1626 {
1627 tree remaining_byte_size = wide_int_to_tree (size_type_node,
1628 byte_size - offset);
1629 return mgr->get_or_create_constant_svalue (remaining_byte_size);
1630 }
1631 }
1632
1633 return region::get_byte_size_sval (mgr);
1634 }
1635
1636 /* class sized_region : public region. */
1637
/* Implementation of region::accept vfunc for sized_region.
   Visit the region itself, then the byte-size svalue.  */

void
sized_region::accept (visitor *v) const
{
  region::accept (v);
  m_byte_size_sval->accept (v);
}
1646
1647 /* Implementation of region::dump_to_pp vfunc for sized_region. */
1648
1649 void
1650 sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
1651 {
1652 if (simple)
1653 {
1654 pp_string (pp, "SIZED_REG(");
1655 get_parent_region ()->dump_to_pp (pp, simple);
1656 pp_string (pp, ", ");
1657 m_byte_size_sval->dump_to_pp (pp, simple);
1658 pp_string (pp, ")");
1659 }
1660 else
1661 {
1662 pp_string (pp, "sized_region(");
1663 get_parent_region ()->dump_to_pp (pp, simple);
1664 pp_string (pp, ", ");
1665 m_byte_size_sval->dump_to_pp (pp, simple);
1666 pp_printf (pp, ")");
1667 }
1668 }
1669
1670 /* Implementation of region::get_byte_size vfunc for sized_region. */
1671
1672 bool
1673 sized_region::get_byte_size (byte_size_t *out) const
1674 {
1675 if (tree cst = m_byte_size_sval->maybe_get_constant ())
1676 {
1677 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1678 *out = tree_to_uhwi (cst);
1679 return true;
1680 }
1681 return false;
1682 }
1683
1684 /* Implementation of region::get_bit_size vfunc for sized_region. */
1685
1686 bool
1687 sized_region::get_bit_size (bit_size_t *out) const
1688 {
1689 byte_size_t byte_size;
1690 if (!get_byte_size (&byte_size))
1691 return false;
1692 *out = byte_size * BITS_PER_UNIT;
1693 return true;
1694 }
1695
1696 /* class cast_region : public region. */
1697
/* Implementation of region::accept vfunc for cast_region.
   Visit the region itself, then the original (uncast) region.  */

void
cast_region::accept (visitor *v) const
{
  region::accept (v);
  m_original_region->accept (v);
}
1706
1707 /* Implementation of region::dump_to_pp vfunc for cast_region. */
1708
1709 void
1710 cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
1711 {
1712 if (simple)
1713 {
1714 pp_string (pp, "CAST_REG(");
1715 print_quoted_type (pp, get_type ());
1716 pp_string (pp, ", ");
1717 m_original_region->dump_to_pp (pp, simple);
1718 pp_string (pp, ")");
1719 }
1720 else
1721 {
1722 pp_string (pp, "cast_region(");
1723 m_original_region->dump_to_pp (pp, simple);
1724 pp_string (pp, ", ");
1725 print_quoted_type (pp, get_type ());
1726 pp_printf (pp, ")");
1727 }
1728 }
1729
1730 /* Implementation of region::get_relative_concrete_offset vfunc
1731 for cast_region. */
1732
1733 bool
1734 cast_region::get_relative_concrete_offset (bit_offset_t *out) const
1735 {
1736 *out = (int) 0;
1737 return true;
1738 }
1739
1740 /* class heap_allocated_region : public region. */
1741
1742 /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */
1743
1744 void
1745 heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
1746 {
1747 if (simple)
1748 pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
1749 else
1750 pp_printf (pp, "heap_allocated_region(%i)", get_id ());
1751 }
1752
1753 /* class alloca_region : public region. */
1754
1755 /* Implementation of region::dump_to_pp vfunc for alloca_region. */
1756
1757 void
1758 alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
1759 {
1760 if (simple)
1761 pp_printf (pp, "ALLOCA_REGION(%i)", get_id ());
1762 else
1763 pp_printf (pp, "alloca_region(%i)", get_id ());
1764 }
1765
1766 /* class string_region : public region. */
1767
1768 /* Implementation of region::dump_to_pp vfunc for string_region. */
1769
1770 void
1771 string_region::dump_to_pp (pretty_printer *pp, bool simple) const
1772 {
1773 if (simple)
1774 dump_tree (pp, m_string_cst);
1775 else
1776 {
1777 pp_string (pp, "string_region(");
1778 dump_tree (pp, m_string_cst);
1779 if (!flag_dump_noaddr)
1780 {
1781 pp_string (pp, " (");
1782 pp_pointer (pp, m_string_cst);
1783 pp_string (pp, "))");
1784 }
1785 }
1786 }
1787
1788 /* class bit_range_region : public region. */
1789
1790 /* Implementation of region::dump_to_pp vfunc for bit_range_region. */
1791
1792 void
1793 bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
1794 {
1795 if (simple)
1796 {
1797 pp_string (pp, "BIT_RANGE_REG(");
1798 get_parent_region ()->dump_to_pp (pp, simple);
1799 pp_string (pp, ", ");
1800 m_bits.dump_to_pp (pp);
1801 pp_string (pp, ")");
1802 }
1803 else
1804 {
1805 pp_string (pp, "bit_range_region(");
1806 get_parent_region ()->dump_to_pp (pp, simple);
1807 pp_string (pp, ", ");
1808 m_bits.dump_to_pp (pp);
1809 pp_printf (pp, ")");
1810 }
1811 }
1812
1813 /* Implementation of region::get_byte_size vfunc for bit_range_region. */
1814
1815 bool
1816 bit_range_region::get_byte_size (byte_size_t *out) const
1817 {
1818 if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
1819 {
1820 *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
1821 return true;
1822 }
1823 return false;
1824 }
1825
/* Implementation of region::get_bit_size vfunc for bit_range_region:
   the size in bits is known directly from m_bits, so this always
   succeeds.  */

bool
bit_range_region::get_bit_size (bit_size_t *out) const
{
  *out = m_bits.m_size_in_bits;
  return true;
}
1834
1835 /* Implementation of region::get_byte_size_sval vfunc for bit_range_region. */
1836
1837 const svalue *
1838 bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
1839 {
1840 if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
1841 return mgr->get_or_create_unknown_svalue (size_type_node);
1842
1843 HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
1844 return mgr->get_or_create_int_cst (size_type_node, num_bytes);
1845 }
1846
/* Implementation of region::get_relative_concrete_offset vfunc for
   bit_range_region: the start bit offset is known directly from
   m_bits, so this always succeeds.  */

bool
bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  *out = m_bits.get_start_bit_offset ();
  return true;
}
1856
1857 /* Implementation of region::get_relative_symbolic_offset vfunc for
1858 bit_range_region.
1859 The returned svalue is equal to the offset converted to bytes and
1860 rounded off. */
1861
1862 const svalue *
1863 bit_range_region::get_relative_symbolic_offset (region_model_manager *mgr)
1864 const
1865 {
1866 byte_offset_t start_byte = m_bits.get_start_bit_offset () / BITS_PER_UNIT;
1867 tree start_bit_tree = wide_int_to_tree (integer_type_node, start_byte);
1868 return mgr->get_or_create_constant_svalue (start_bit_tree);
1869 }
1870
1871 /* class var_arg_region : public region. */
1872
1873 void
1874 var_arg_region::dump_to_pp (pretty_printer *pp, bool simple) const
1875 {
1876 if (simple)
1877 {
1878 pp_string (pp, "VAR_ARG_REG(");
1879 get_parent_region ()->dump_to_pp (pp, simple);
1880 pp_printf (pp, ", arg_idx: %d)", m_idx);
1881 }
1882 else
1883 {
1884 pp_string (pp, "var_arg_region(");
1885 get_parent_region ()->dump_to_pp (pp, simple);
1886 pp_printf (pp, ", arg_idx: %d)", m_idx);
1887 }
1888 }
1889
/* Get the frame_region for this var_arg_region.
   The parent of a var_arg_region is always a frame_region
   (enforced here via as_a).  */

const frame_region *
var_arg_region::get_frame_region () const
{
  gcc_assert (get_parent_region ());
  return as_a <const frame_region *> (get_parent_region ());
}
1898
1899 /* class errno_region : public region. */
1900
1901 void
1902 errno_region::dump_to_pp (pretty_printer *pp, bool simple) const
1903 {
1904 if (simple)
1905 pp_string (pp, "errno_region");
1906 else
1907 pp_string (pp, "errno_region()");
1908 }
1909
/* class unknown_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for unknown_region.
   There is only one form of output, so SIMPLE is ignored.  */

void
unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
{
  pp_string (pp, "UNKNOWN_REGION");
}
1919
1920 } // namespace ana
1921
1922 #endif /* #if ENABLE_ANALYZER */