/* gcc/analyzer/region.cc */
1 /* Regions of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "diagnostic-core.h"
26 #include "gimple-pretty-print.h"
27 #include "function.h"
28 #include "basic-block.h"
29 #include "gimple.h"
30 #include "gimple-iterator.h"
31 #include "diagnostic-core.h"
32 #include "graphviz.h"
33 #include "options.h"
34 #include "cgraph.h"
35 #include "tree-dfa.h"
36 #include "stringpool.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "fold-const.h"
40 #include "tree-pretty-print.h"
41 #include "diagnostic-color.h"
42 #include "diagnostic-metadata.h"
43 #include "bitmap.h"
44 #include "analyzer/analyzer.h"
45 #include "analyzer/analyzer-logging.h"
46 #include "ordered-hash-map.h"
47 #include "options.h"
48 #include "cgraph.h"
49 #include "cfg.h"
50 #include "digraph.h"
51 #include "analyzer/supergraph.h"
52 #include "sbitmap.h"
53 #include "analyzer/call-string.h"
54 #include "analyzer/program-point.h"
55 #include "analyzer/store.h"
56 #include "analyzer/region.h"
57 #include "analyzer/region-model.h"
58 #include "analyzer/sm.h"
59 #include "analyzer/program-state.h"
60
61 #if ENABLE_ANALYZER
62
63 namespace ana {
64
65 /* class region and its various subclasses. */
66
67 /* class region. */
68
/* region's destructor: release the lazily-computed offset cached by
   region::get_offset, if any.  */

region::~region ()
{
  delete m_cached_offset;
}
73
74 /* Compare REG1 and REG2 by id. */
75
76 int
77 region::cmp_ids (const region *reg1, const region *reg2)
78 {
79 return (long)reg1->get_id () - (long)reg2->get_id ();
80 }
81
82 /* Determine the base region for this region: when considering bindings
83 for this region, the base region is the ancestor which identifies
84 which cluster they should be partitioned into.
85 Regions within the same struct/union/array are in the same cluster.
86 Different decls are in different clusters. */
87
88 const region *
89 region::get_base_region () const
90 {
91 const region *iter = this;
92 while (iter)
93 {
94 switch (iter->get_kind ())
95 {
96 case RK_FIELD:
97 case RK_ELEMENT:
98 case RK_OFFSET:
99 case RK_SIZED:
100 case RK_BIT_RANGE:
101 iter = iter->get_parent_region ();
102 continue;
103 case RK_CAST:
104 iter = iter->dyn_cast_cast_region ()->get_original_region ();
105 continue;
106 default:
107 return iter;
108 }
109 }
110 return iter;
111 }
112
113 /* Return true if get_base_region() == this for this region. */
114
115 bool
116 region::base_region_p () const
117 {
118 switch (get_kind ())
119 {
120 /* Region kinds representing a descendent of a base region. */
121 case RK_FIELD:
122 case RK_ELEMENT:
123 case RK_OFFSET:
124 case RK_SIZED:
125 case RK_CAST:
126 case RK_BIT_RANGE:
127 return false;
128
129 default:
130 return true;
131 }
132 }
133
134 /* Return true if this region is ELDER or one of its descendents. */
135
136 bool
137 region::descendent_of_p (const region *elder) const
138 {
139 const region *iter = this;
140 while (iter)
141 {
142 if (iter == elder)
143 return true;
144 if (iter->get_kind () == RK_CAST)
145 iter = iter->dyn_cast_cast_region ()->get_original_region ();
146 else
147 iter = iter->get_parent_region ();
148 }
149 return false;
150 }
151
152 /* If this region is a frame_region, or a descendent of one, return it.
153 Otherwise return NULL. */
154
155 const frame_region *
156 region::maybe_get_frame_region () const
157 {
158 const region *iter = this;
159 while (iter)
160 {
161 if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
162 return frame_reg;
163 if (iter->get_kind () == RK_CAST)
164 iter = iter->dyn_cast_cast_region ()->get_original_region ();
165 else
166 iter = iter->get_parent_region ();
167 }
168 return NULL;
169 }
170
171 /* Get the memory space of this region. */
172
173 enum memory_space
174 region::get_memory_space () const
175 {
176 const region *iter = this;
177 while (iter)
178 {
179 switch (iter->get_kind ())
180 {
181 default:
182 break;
183 case RK_GLOBALS:
184 return MEMSPACE_GLOBALS;
185 case RK_CODE:
186 case RK_FUNCTION:
187 case RK_LABEL:
188 return MEMSPACE_CODE;
189 case RK_FRAME:
190 case RK_STACK:
191 case RK_ALLOCA:
192 return MEMSPACE_STACK;
193 case RK_HEAP:
194 case RK_HEAP_ALLOCATED:
195 return MEMSPACE_HEAP;
196 case RK_STRING:
197 return MEMSPACE_READONLY_DATA;
198 }
199 if (iter->get_kind () == RK_CAST)
200 iter = iter->dyn_cast_cast_region ()->get_original_region ();
201 else
202 iter = iter->get_parent_region ();
203 }
204 return MEMSPACE_UNKNOWN;
205 }
206
/* Subroutine for use by region_model_manager::get_or_create_initial_value.
   Return true if this region has an initial_svalue.
   Return false if attempting to use INIT_VAL(this_region) should give
   the "UNINITIALIZED" poison value.
   The decision is made per base region, by memory space, and (for the
   stack) by the kind of decl, if any.  */

bool
region::can_have_initial_svalue_p () const
{
  const region *base_reg = get_base_region ();

  /* Check for memory spaces that are uninitialized by default.  */
  enum memory_space mem_space = base_reg->get_memory_space ();
  switch (mem_space)
    {
    default:
      gcc_unreachable ();
    case MEMSPACE_UNKNOWN:
    case MEMSPACE_CODE:
    case MEMSPACE_GLOBALS:
    case MEMSPACE_READONLY_DATA:
      /* Such regions have initial_svalues.  */
      return true;

    case MEMSPACE_HEAP:
      /* Heap allocations are uninitialized by default.  */
      return false;

    case MEMSPACE_STACK:
      if (tree decl = base_reg->maybe_get_decl ())
	{
	  /* See the assertion in frame_region::get_region_for_local for the
	     tree codes we need to handle here.  */
	  switch (TREE_CODE (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case PARM_DECL:
	      /* Parameters have initial values.  */
	      return true;

	    case VAR_DECL:
	    case RESULT_DECL:
	      /* Function locals don't have initial values.  */
	      return false;

	    case SSA_NAME:
	      {
		tree ssa_name = decl;
		/* SSA names that are the default defn of a PARM_DECL
		   have initial_svalues; other SSA names don't.  */
		if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
		    && SSA_NAME_VAR (ssa_name)
		    && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
		  return true;
		else
		  return false;
	      }
	    }
	}

      /* If we have an on-stack region that isn't associated with a decl
	 or SSA name, then we have VLA/alloca, which is uninitialized.  */
      return false;
    }
}
273
274 /* If this region is a decl_region, return the decl.
275 Otherwise return NULL. */
276
277 tree
278 region::maybe_get_decl () const
279 {
280 if (const decl_region *decl_reg = dyn_cast_decl_region ())
281 return decl_reg->get_decl ();
282 return NULL_TREE;
283 }
284
285 /* Get the region_offset for this region (calculating it on the
286 first call and caching it internally). */
287
288 region_offset
289 region::get_offset (region_model_manager *mgr) const
290 {
291 if(!m_cached_offset)
292 m_cached_offset = new region_offset (calc_offset (mgr));
293 return *m_cached_offset;
294 }
295
296 /* Base class implementation of region::get_byte_size vfunc.
297 If the size of this region (in bytes) is known statically, write it to *OUT
298 and return true.
299 Otherwise return false. */
300
301 bool
302 region::get_byte_size (byte_size_t *out) const
303 {
304 tree type = get_type ();
305
306 /* Bail out e.g. for heap-allocated regions. */
307 if (!type)
308 return false;
309
310 HOST_WIDE_INT bytes = int_size_in_bytes (type);
311 if (bytes == -1)
312 return false;
313 *out = bytes;
314 return true;
315 }
316
/* Base implementation of region::get_byte_size_sval vfunc.
   Return an svalue for the size of this region in bytes, expressed
   in size_type_node, falling back to an "unknown" svalue when the
   size can't be determined statically.  */

const svalue *
region::get_byte_size_sval (region_model_manager *mgr) const
{
  tree type = get_type ();

  /* Bail out e.g. for heap-allocated regions.  */
  if (!type)
    return mgr->get_or_create_unknown_svalue (size_type_node);

  /* -1 indicates a non-constant size.  */
  HOST_WIDE_INT bytes = int_size_in_bytes (type);
  if (bytes == -1)
    return mgr->get_or_create_unknown_svalue (size_type_node);

  tree byte_size = size_in_bytes (type);
  /* Normalize the constant's type to size_type_node — presumably so that
     equal sizes share one constant svalue; TODO confirm.  */
  if (TREE_TYPE (byte_size) != size_type_node)
    byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
  return mgr->get_or_create_constant_svalue (byte_size);
}
337
338 /* Attempt to get the size of TYPE in bits.
339 If successful, return true and write the size to *OUT.
340 Otherwise return false. */
341
342 bool
343 int_size_in_bits (const_tree type, bit_size_t *out)
344 {
345 if (INTEGRAL_TYPE_P (type))
346 {
347 *out = TYPE_PRECISION (type);
348 return true;
349 }
350
351 tree sz = TYPE_SIZE (type);
352 if (sz && tree_fits_uhwi_p (sz))
353 {
354 *out = TREE_INT_CST_LOW (sz);
355 return true;
356 }
357 else
358 return false;
359 }
360
361 /* If the size of this region (in bits) is known statically, write it to *OUT
362 and return true.
363 Otherwise return false. */
364
365 bool
366 region::get_bit_size (bit_size_t *out) const
367 {
368 tree type = get_type ();
369
370 /* Bail out e.g. for heap-allocated regions. */
371 if (!type)
372 return false;
373
374 return int_size_in_bits (type, out);
375 }
376
377 /* Get the field within RECORD_TYPE at BIT_OFFSET. */
378
379 tree
380 get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
381 {
382 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
383 if (bit_offset < 0)
384 return NULL;
385
386 /* Find the first field that has an offset > BIT_OFFSET,
387 then return the one preceding it.
388 Skip other trees within the chain, such as FUNCTION_DECLs. */
389 tree last_field = NULL_TREE;
390 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
391 iter = DECL_CHAIN (iter))
392 {
393 if (TREE_CODE (iter) == FIELD_DECL)
394 {
395 int iter_field_offset = int_bit_position (iter);
396 if (bit_offset < iter_field_offset)
397 return last_field;
398 last_field = iter;
399 }
400 }
401 return last_field;
402 }
403
/* Populate *OUT with descendent regions of type TYPE that match
   RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region.
   Recurses through ARRAY_TYPE, RECORD_TYPE and UNION_TYPE; does
   nothing for other type kinds, or when either type is unknown.  */

void
region::get_subregions_for_binding (region_model_manager *mgr,
				    bit_offset_t relative_bit_offset,
				    bit_size_t size_in_bits,
				    tree type,
				    auto_vec <const region *> *out) const
{
  if (get_type () == NULL_TREE || type == NULL_TREE)
    return;
  if (relative_bit_offset == 0
      && types_compatible_p (get_type (), type))
    {
      /* Exact match at this level.  */
      out->safe_push (this);
      return;
    }
  switch (TREE_CODE (get_type ()))
    {
    case ARRAY_TYPE:
      {
	tree element_type = TREE_TYPE (get_type ());
	HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
	/* Only handle elements of known, positive size.  */
	if (hwi_byte_size > 0)
	  {
	    HOST_WIDE_INT bits_per_element
	      = hwi_byte_size << LOG2_BITS_PER_UNIT;
	    /* Identify the element containing the offset, and the
	       offset within that element.  */
	    HOST_WIDE_INT element_index
	      = (relative_bit_offset.to_shwi () / bits_per_element);
	    tree element_index_cst
	      = build_int_cst (integer_type_node, element_index);
	    HOST_WIDE_INT inner_bit_offset
	      = relative_bit_offset.to_shwi () % bits_per_element;
	    const region *subregion = mgr->get_element_region
	      (this, element_type,
	       mgr->get_or_create_constant_svalue (element_index_cst));
	    subregion->get_subregions_for_binding (mgr, inner_bit_offset,
						   size_in_bits, type, out);
	  }
      }
      break;
    case RECORD_TYPE:
      {
	/* The bit offset might be *within* one of the fields (such as
	   with nested structs).
	   So we want to find the enclosing field, adjust the offset,
	   and repeat.  */
	if (tree field = get_field_at_bit_offset (get_type (),
						  relative_bit_offset))
	  {
	    int field_bit_offset = int_bit_position (field);
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding
	      (mgr, relative_bit_offset - field_bit_offset,
	       size_in_bits, type, out);
	  }
      }
      break;
    case UNION_TYPE:
      {
	/* All union members start at the same offset, so try each
	   FIELD_DECL with the unchanged relative offset.  */
	for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
	     field = DECL_CHAIN (field))
	  {
	    if (TREE_CODE (field) != FIELD_DECL)
	      continue;
	    const region *subregion = mgr->get_field_region (this, field);
	    subregion->get_subregions_for_binding (mgr,
						   relative_bit_offset,
						   size_in_bits,
						   type,
						   out);
	  }
      }
      break;
    default:
      /* Do nothing.  */
      break;
    }
}
484
/* Walk from this region up to the base region within its cluster, calculating
   the offset relative to the base region, either as an offset in bits,
   or a symbolic offset.
   The walk accumulates a concrete bit offset for as long as possible;
   the first ancestor without a concrete relative offset switches the
   accumulation into symbolic mode (a byte-offset svalue).  */

region_offset
region::calc_offset (region_model_manager *mgr) const
{
  const region *iter_region = this;
  bit_offset_t accum_bit_offset = 0;
  const svalue *accum_byte_sval = NULL;	/* non-NULL once symbolic.  */

  while (iter_region)
    {
      switch (iter_region->get_kind ())
	{
	case RK_FIELD:
	case RK_ELEMENT:
	case RK_OFFSET:
	case RK_BIT_RANGE:
	  if (accum_byte_sval)
	    {
	      /* Symbolic mode: add this region's symbolic byte offset
		 and step up to the parent.  */
	      const svalue *sval
		= iter_region->get_relative_symbolic_offset (mgr);
	      accum_byte_sval
		= mgr->get_or_create_binop (sval->get_type (), PLUS_EXPR,
					    accum_byte_sval, sval);
	      iter_region = iter_region->get_parent_region ();
	    }
	  else
	    {
	      bit_offset_t rel_bit_offset;
	      if (iter_region->get_relative_concrete_offset (&rel_bit_offset))
		{
		  accum_bit_offset += rel_bit_offset;
		  iter_region = iter_region->get_parent_region ();
		}
	      else
		{
		  /* If the iter_region is not concrete anymore, convert the
		     accumulated bits to a svalue in bytes and revisit the
		     iter_region collecting the symbolic value.
		     Note that iter_region is deliberately NOT advanced here;
		     the next loop iteration re-handles it via the
		     accum_byte_sval branch above.  */
		  byte_offset_t byte_offset = accum_bit_offset / BITS_PER_UNIT;
		  tree offset_tree = wide_int_to_tree (integer_type_node,
						       byte_offset);
		  accum_byte_sval
		    = mgr->get_or_create_constant_svalue (offset_tree);
		}
	    }
	  continue;
	case RK_SIZED:
	  /* A sized region shares its parent's offset.  */
	  iter_region = iter_region->get_parent_region ();
	  continue;

	case RK_CAST:
	  {
	    /* Look through casts to the underlying region.  */
	    const cast_region *cast_reg
	      = as_a <const cast_region *> (iter_region);
	    iter_region = cast_reg->get_original_region ();
	  }
	  continue;

	default:
	  /* Reached a region that doesn't contribute an offset: this is
	     the base of the cluster.  */
	  return accum_byte_sval
		  ? region_offset::make_symbolic (iter_region,
						  accum_byte_sval)
		  : region_offset::make_concrete (iter_region,
						  accum_bit_offset);
	}
    }

  /* Fallback for a NULL iter_region.  */
  return accum_byte_sval ? region_offset::make_symbolic (iter_region,
							 accum_byte_sval)
			 : region_offset::make_concrete (iter_region,
							 accum_bit_offset);
}
560
/* Base implementation of region::get_relative_concrete_offset vfunc:
   by default, a region has no known concrete offset relative to its
   parent, so return false without writing to the out-param.  */

bool
region::get_relative_concrete_offset (bit_offset_t *) const
{
  return false;
}
568
/* Base implementation of region::get_relative_symbolic_offset vfunc:
   by default nothing is known about the offset relative to the parent,
   so return an unknown svalue of integer type.  */

const svalue *
region::get_relative_symbolic_offset (region_model_manager *mgr) const
{
  return mgr->get_or_create_unknown_svalue (integer_type_node);
}
576
577 /* Attempt to get the position and size of this region expressed as a
578 concrete range of bytes relative to its parent.
579 If successful, return true and write to *OUT.
580 Otherwise return false. */
581
582 bool
583 region::get_relative_concrete_byte_range (byte_range *out) const
584 {
585 /* We must have a concrete offset relative to the parent. */
586 bit_offset_t rel_bit_offset;
587 if (!get_relative_concrete_offset (&rel_bit_offset))
588 return false;
589 /* ...which must be a whole number of bytes. */
590 if (rel_bit_offset % BITS_PER_UNIT != 0)
591 return false;
592 byte_offset_t start_byte_offset = rel_bit_offset / BITS_PER_UNIT;
593
594 /* We must have a concrete size, which must be a whole number
595 of bytes. */
596 byte_size_t num_bytes;
597 if (!get_byte_size (&num_bytes))
598 return false;
599
600 /* Success. */
601 *out = byte_range (start_byte_offset, num_bytes);
602 return true;
603 }
604
/* Dump a description of this region to stderr.  */

DEBUG_FUNCTION void
region::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Inherit colorization settings from the global diagnostic context.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  /* Emit directly to stderr.  */
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple);
  pp_newline (&pp);
  pp_flush (&pp);
}
618
619 /* Return a new json::string describing the region. */
620
621 json::value *
622 region::to_json () const
623 {
624 label_text desc = get_desc (true);
625 json::value *reg_js = new json::string (desc.get ());
626 return reg_js;
627 }
628
/* Generate a description of this region, via dump_to_pp.
   The returned label_text owns its buffer.  */

DEBUG_FUNCTION label_text
region::get_desc (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_to_pp (&pp, simple);
  return label_text::take (xstrdup (pp_formatted_text (&pp)));
}
639
640 /* Base implementation of region::accept vfunc.
641 Subclass implementations should chain up to this. */
642
643 void
644 region::accept (visitor *v) const
645 {
646 v->visit_region (this);
647 if (m_parent)
648 m_parent->accept (v);
649 }
650
/* Return true if this is a symbolic region for dereferencing an
   unknown ptr.
   We shouldn't attempt to bind values for this region (but
   can unbind values for other regions).  */

bool
region::symbolic_for_unknown_ptr_p () const
{
  if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
    if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
      return true;
  return false;
}
664
/* Return true if this is a symbolic region (RK_SYMBOLIC), i.e. a
   region accessed via a pointer svalue.  */

bool
region::symbolic_p () const
{
  return get_kind () == RK_SYMBOLIC;
}
672
673 /* Return true if this is a region for a decl with name DECL_NAME.
674 Intended for use when debugging (for assertions and conditional
675 breakpoints). */
676
677 DEBUG_FUNCTION bool
678 region::is_named_decl_p (const char *decl_name) const
679 {
680 if (tree decl = maybe_get_decl ())
681 if (DECL_NAME (decl)
682 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
683 return true;
684 return false;
685 }
686
/* region's ctor.
   C gives the complexity of this region; ID is a unique id (see
   cmp_ids); PARENT, if non-NULL, is the containing region; TYPE,
   if non-NULL, must be a type tree.  */

region::region (complexity c, unsigned id, const region *parent, tree type)
: m_complexity (c), m_id (id), m_parent (parent), m_type (type),
  m_cached_offset (NULL)
{
  gcc_assert (type == NULL_TREE || TYPE_P (type));
}
695
696 /* Comparator for use by vec<const region *>::qsort,
697 using their IDs to order them. */
698
699 int
700 region::cmp_ptr_ptr (const void *p1, const void *p2)
701 {
702 const region * const *reg1 = (const region * const *)p1;
703 const region * const *reg2 = (const region * const *)p2;
704
705 return cmp_ids (*reg1, *reg2);
706 }
707
708 /* Determine if a pointer to this region must be non-NULL.
709
710 Generally, pointers to regions must be non-NULL, but pointers
711 to symbolic_regions might, in fact, be NULL.
712
713 This allows us to simulate functions like malloc and calloc with:
714 - only one "outcome" from each statement,
715 - the idea that the pointer is on the heap if non-NULL
716 - the possibility that the pointer could be NULL
717 - the idea that successive values returned from malloc are non-equal
718 - to be able to zero-fill for calloc. */
719
720 bool
721 region::non_null_p () const
722 {
723 switch (get_kind ())
724 {
725 default:
726 return true;
727 case RK_SYMBOLIC:
728 /* Are we within a symbolic_region? If so, it could be NULL, and we
729 have to fall back on the constraints. */
730 return false;
731 case RK_HEAP_ALLOCATED:
732 return false;
733 }
734 }
735
736 /* Return true iff this region is defined in terms of SVAL. */
737
738 bool
739 region::involves_p (const svalue *sval) const
740 {
741 if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
742 {
743 if (symbolic_reg->get_pointer ()->involves_p (sval))
744 return true;
745 }
746
747 return false;
748 }
749
/* Comparator for trees to impose a deterministic ordering on
   T1 and T2.
   Handles decls, SSA names, and a few constant kinds; other tree
   codes hit gcc_unreachable.  */

static int
tree_cmp (const_tree t1, const_tree t2)
{
  gcc_assert (t1);
  gcc_assert (t2);

  /* Test tree codes first.  */
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return TREE_CODE (t1) - TREE_CODE (t2);

  /* From this point on, we know T1 and T2 have the same tree code.  */

  if (DECL_P (t1))
    {
      if (DECL_NAME (t1) && DECL_NAME (t2))
	return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
		       IDENTIFIER_POINTER (DECL_NAME (t2)));
      else
	{
	  /* Order named decls before anonymous ones; order anonymous
	     decls by their UIDs.  */
	  if (DECL_NAME (t1))
	    return -1;
	  else if (DECL_NAME (t2))
	    return 1;
	  else
	    return DECL_UID (t1) - DECL_UID (t2);
	}
    }

  switch (TREE_CODE (t1))
    {
    case SSA_NAME:
      {
	if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
	  {
	    /* Order by underlying var first, then by SSA version.  */
	    int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
	    if (var_cmp)
	      return var_cmp;
	    return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
	else
	  {
	    /* SSA names with vars sort before anonymous ones.  */
	    if (SSA_NAME_VAR (t1))
	      return -1;
	    else if (SSA_NAME_VAR (t2))
	      return 1;
	    else
	      return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
	  }
      }
      break;

    case INTEGER_CST:
      return tree_int_cst_compare (t1, t2);

    case REAL_CST:
      {
	const real_value *rv1 = TREE_REAL_CST_PTR (t1);
	const real_value *rv2 = TREE_REAL_CST_PTR (t2);
	if (real_compare (UNORDERED_EXPR, rv1, rv2))
	  {
	    /* Impose an arbitrary order on NaNs relative to other NaNs
	       and to non-NaNs.  */
	    if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
	      return cmp_isnan;
	    if (int cmp_issignaling_nan
		  = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
	      return cmp_issignaling_nan;
	    return real_isneg (rv1) - real_isneg (rv2);
	  }
	if (real_compare (LT_EXPR, rv1, rv2))
	  return -1;
	if (real_compare (GT_EXPR, rv1, rv2))
	  return 1;
	return 0;
      }

    case STRING_CST:
      return strcmp (TREE_STRING_POINTER (t1),
		     TREE_STRING_POINTER (t2));

    default:
      gcc_unreachable ();
      break;
    }

  gcc_unreachable ();

  return 0;
}
842
843 /* qsort comparator for trees to impose a deterministic ordering on
844 P1 and P2. */
845
846 int
847 tree_cmp (const void *p1, const void *p2)
848 {
849 const_tree t1 = *(const_tree const *)p1;
850 const_tree t2 = *(const_tree const *)p2;
851
852 return tree_cmp (t1, t2);
853 }
854
/* class frame_region : public space_region.  */

/* frame_region's dtor: delete the decl_regions for the frame's locals,
   which were allocated by get_region_for_local and are owned by
   m_locals.  */

frame_region::~frame_region ()
{
  for (map_t::iterator iter = m_locals.begin ();
       iter != m_locals.end ();
       ++iter)
    delete (*iter).second;
}
864
865 void
866 frame_region::accept (visitor *v) const
867 {
868 region::accept (v);
869 if (m_calling_frame)
870 m_calling_frame->accept (v);
871 }
872
/* Implementation of region::dump_to_pp vfunc for frame_region.
   The simple form shows function name and stack depth; the full form
   additionally shows the frame's index.  */

void
frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ());
  else
    pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
	       function_name (m_fun), m_index, get_stack_depth ());
}
884
/* Get the decl_region for EXPR (a local, parameter, result or SSA name
   of this frame's function), creating and caching it on first use.
   CTXT, if non-NULL, is used only for checking SSA names that have no
   underlying var.  */

const decl_region *
frame_region::get_region_for_local (region_model_manager *mgr,
				    tree expr,
				    const region_model_context *ctxt) const
{
  if (CHECKING_P)
    {
      /* Verify that EXPR is a local or SSA name, and that it's for the
	 correct function for this stack frame.  */
      gcc_assert (TREE_CODE (expr) == PARM_DECL
		  || TREE_CODE (expr) == VAR_DECL
		  || TREE_CODE (expr) == SSA_NAME
		  || TREE_CODE (expr) == RESULT_DECL);
      switch (TREE_CODE (expr))
	{
	default:
	  gcc_unreachable ();
	case VAR_DECL:
	  gcc_assert (!is_global_var (expr));
	  /* Fall through.  */
	case PARM_DECL:
	case RESULT_DECL:
	  gcc_assert (DECL_CONTEXT (expr) == m_fun->decl);
	  break;
	case SSA_NAME:
	  {
	    if (tree var = SSA_NAME_VAR (expr))
	      {
		if (DECL_P (var))
		  gcc_assert (DECL_CONTEXT (var) == m_fun->decl);
	      }
	    else if (ctxt)
	      /* For SSA names with no var, check via the supergraph
		 that the defining stmt belongs to this function.  */
	      if (const extrinsic_state *ext_state = ctxt->get_ext_state ())
		if (const supergraph *sg
		      = ext_state->get_engine ()->get_supergraph ())
		  {
		    const gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
		    const supernode *snode
		      = sg->get_supernode_for_stmt (def_stmt);
		    gcc_assert (snode->get_function () == m_fun);
		  }
	  }
	  break;
	}
    }

  /* Ideally we'd use mutable here.  */
  map_t &mutable_locals = const_cast <map_t &> (m_locals);

  /* Return the cached region for EXPR, if any; otherwise create,
     cache and return a new one.  */
  if (decl_region **slot = mutable_locals.get (expr))
    return *slot;
  decl_region *reg
    = new decl_region (mgr->alloc_region_id (), this, expr);
  mutable_locals.put (expr, reg);
  return reg;
}
941
942 /* class globals_region : public space_region. */
943
944 /* Implementation of region::dump_to_pp vfunc for globals_region. */
945
946 void
947 globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
948 {
949 if (simple)
950 pp_string (pp, "::");
951 else
952 pp_string (pp, "globals");
953 }
954
955 /* class code_region : public map_region. */
956
957 /* Implementation of region::dump_to_pp vfunc for code_region. */
958
959 void
960 code_region::dump_to_pp (pretty_printer *pp, bool simple) const
961 {
962 if (simple)
963 pp_string (pp, "code region");
964 else
965 pp_string (pp, "code_region()");
966 }
967
968 /* class function_region : public region. */
969
970 /* Implementation of region::dump_to_pp vfunc for function_region. */
971
972 void
973 function_region::dump_to_pp (pretty_printer *pp, bool simple) const
974 {
975 if (simple)
976 {
977 dump_quoted_tree (pp, m_fndecl);
978 }
979 else
980 {
981 pp_string (pp, "function_region(");
982 dump_quoted_tree (pp, m_fndecl);
983 pp_string (pp, ")");
984 }
985 }
986
987 /* class label_region : public region. */
988
989 /* Implementation of region::dump_to_pp vfunc for label_region. */
990
991 void
992 label_region::dump_to_pp (pretty_printer *pp, bool simple) const
993 {
994 if (simple)
995 {
996 dump_quoted_tree (pp, m_label);
997 }
998 else
999 {
1000 pp_string (pp, "label_region(");
1001 dump_quoted_tree (pp, m_label);
1002 pp_string (pp, ")");
1003 }
1004 }
1005
1006 /* class stack_region : public region. */
1007
1008 /* Implementation of region::dump_to_pp vfunc for stack_region. */
1009
1010 void
1011 stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
1012 {
1013 if (simple)
1014 pp_string (pp, "stack region");
1015 else
1016 pp_string (pp, "stack_region()");
1017 }
1018
1019 /* class heap_region : public region. */
1020
1021 /* Implementation of region::dump_to_pp vfunc for heap_region. */
1022
1023 void
1024 heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
1025 {
1026 if (simple)
1027 pp_string (pp, "heap region");
1028 else
1029 pp_string (pp, "heap_region()");
1030 }
1031
/* class root_region : public region.  */

/* root_region's ctor: the root has no parent region and no type.  */

root_region::root_region (unsigned id)
: region (complexity (1, 1), id, NULL, NULL_TREE)
{
}
1040
1041 /* Implementation of region::dump_to_pp vfunc for root_region. */
1042
1043 void
1044 root_region::dump_to_pp (pretty_printer *pp, bool simple) const
1045 {
1046 if (simple)
1047 pp_string (pp, "root region");
1048 else
1049 pp_string (pp, "root_region()");
1050 }
1051
/* class symbolic_region : public map_region.  */

/* symbolic_region's ctor.
   SVAL_PTR is the pointer svalue being dereferenced; when it has a
   type, the region's type is the TREE_TYPE of that type (i.e. the
   pointed-to type for pointer types), otherwise the region is
   untyped.  */

symbolic_region::symbolic_region (unsigned id, region *parent,
				  const svalue *sval_ptr)
: region (complexity::from_pair (parent, sval_ptr), id, parent,
	  (sval_ptr->get_type ()
	   ? TREE_TYPE (sval_ptr->get_type ())
	   : NULL_TREE)),
  m_sval_ptr (sval_ptr)
{
}
1065
/* Implementation of region::accept vfunc for symbolic_region:
   chain up to the base implementation, then also visit the pointer
   svalue this region dereferences.  */

void
symbolic_region::accept (visitor *v) const
{
  region::accept (v);
  m_sval_ptr->accept (v);
}
1074
/* Implementation of region::dump_to_pp vfunc for symbolic_region.
   The simple form prints "(*PTR)"; the full form prints parent, type
   (if any) and pointer.  */

void
symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    {
      pp_string (pp, "(*");
      m_sval_ptr->dump_to_pp (pp, simple);
      pp_string (pp, ")");
    }
  else
    {
      pp_string (pp, "symbolic_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      /* Type is optional; only print it when present.  */
      if (get_type ())
	{
	  pp_string (pp, ", ");
	  print_quoted_type (pp, get_type ());
	}
      pp_string (pp, ", ");
      m_sval_ptr->dump_to_pp (pp, simple);
      pp_string (pp, ")");
    }
}
1100
/* class decl_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for decl_region.
   The simple form prints just the decl; the full form prints parent,
   type and quoted decl.  */

void
decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
{
  if (simple)
    pp_printf (pp, "%E", m_decl);
  else
    {
      pp_string (pp, "decl_region(");
      get_parent_region ()->dump_to_pp (pp, simple);
      pp_string (pp, ", ");
      print_quoted_type (pp, get_type ());
      pp_printf (pp, ", %qE)", m_decl);
    }
}
1119
1120 /* Get the stack depth for the frame containing this decl, or 0
1121 for a global. */
1122
1123 int
1124 decl_region::get_stack_depth () const
1125 {
1126 if (get_parent_region () == NULL)
1127 return 0;
1128 if (const frame_region *frame_reg
1129 = get_parent_region ()->dyn_cast_frame_region ())
1130 return frame_reg->get_stack_depth ();
1131 return 0;
1132 }
1133
/* If the underlying decl is in the global constant pool,
   return an svalue representing the constant value.
   Otherwise return NULL.
   Only CONSTRUCTOR initializers are handled; other initializer forms
   yield NULL.  */

const svalue *
decl_region::maybe_get_constant_value (region_model_manager *mgr) const
{
  if (TREE_CODE (m_decl) == VAR_DECL
      && DECL_IN_CONSTANT_POOL (m_decl)
      && DECL_INITIAL (m_decl)
      && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
    return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
  return NULL;
}
1148
/* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl.
   Falls back to an unknown svalue if the constructor can't be
   converted to a binding map.  */

const svalue *
decl_region::get_svalue_for_constructor (tree ctor,
					 region_model_manager *mgr) const
{
  gcc_assert (!TREE_CLOBBER_P (ctor));

  /* Create a binding map, applying ctor to it, using this
     decl_region as the base region when building child regions
     for offset calculations.  */
  binding_map map;
  if (!map.apply_ctor_to_region (this, ctor, mgr))
    return mgr->get_or_create_unknown_svalue (get_type ());

  /* Return a compound svalue for the map we built.  */
  return mgr->get_or_create_compound_svalue (get_type (), map);
}
1167
/* For use on decl_regions for global variables.

   Get an svalue for the initial value of this region at entry to
   "main" (either based on DECL_INITIAL, or implicit initialization to
   zero.

   Return NULL if there is a problem.  */

const svalue *
decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
{
  tree init = DECL_INITIAL (m_decl);
  if (!init)
    {
      /* If we have an "extern" decl then there may be an initializer in
	 another TU.  */
      if (DECL_EXTERNAL (m_decl))
	return NULL;

      /* Implicit initialization to zero; use a compound_svalue for it.
	 Doing so requires that we have a concrete binding for this region,
	 which can fail if we have a region with unknown size
	 (e.g. "extern const char arr[];").  */
      const binding_key *binding
	= binding_key::make (mgr->get_store_manager (), this);
      if (binding->symbolic_p ())
	return NULL;

      /* If we don't care about tracking the content of this region, then
	 it's unused, and the value doesn't matter.  */
      if (!tracked_p ())
	return NULL;

      /* Build a zero-filled cluster and return its bindings as a
	 compound svalue.  */
      binding_cluster c (this);
      c.zero_fill_region (mgr->get_store_manager (), this);
      return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
						 c.get_map ());
    }

  /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
     values (to avoid writing out an extra section).  */
  if (init == error_mark_node)
    return NULL;

  if (TREE_CODE (init) == CONSTRUCTOR)
    return get_svalue_for_constructor (init, mgr);

  /* Reuse the get_rvalue logic from region_model.  */
  region_model m (mgr);
  return m.get_rvalue (path_var (init, 0), NULL);
}
1219
1220 /* Subroutine of symnode_requires_tracking_p; return true if REF
1221 might imply that we should be tracking the value of its decl. */
1222
1223 static bool
1224 ipa_ref_requires_tracking (ipa_ref *ref)
1225 {
1226 /* If we have a load/store/alias of the symbol, then we'll track
1227 the decl's value. */
1228 if (ref->use != IPA_REF_ADDR)
1229 return true;
1230
1231 if (ref->stmt == NULL)
1232 return true;
1233
1234 switch (ref->stmt->code)
1235 {
1236 default:
1237 return true;
1238 case GIMPLE_CALL:
1239 {
1240 cgraph_node *caller_cnode = dyn_cast <cgraph_node *> (ref->referring);
1241 if (caller_cnode == NULL)
1242 return true;
1243 cgraph_edge *edge = caller_cnode->get_edge (ref->stmt);
1244 if (!edge)
1245 return true;
1246 if (edge->callee == NULL)
1247 return true; /* e.g. call through function ptr. */
1248 if (edge->callee->definition)
1249 return true;
1250 /* If we get here, then this ref is a pointer passed to
1251 a function we don't have the definition for. */
1252 return false;
1253 }
1254 break;
1255 case GIMPLE_ASM:
1256 {
1257 const gasm *asm_stmt = as_a <const gasm *> (ref->stmt);
1258 if (gimple_asm_noutputs (asm_stmt) > 0)
1259 return true;
1260 if (gimple_asm_nclobbers (asm_stmt) > 0)
1261 return true;
1262 /* If we get here, then this ref is the decl being passed
1263 by pointer to asm with no outputs. */
1264 return false;
1265 }
1266 break;
1267 }
1268 }
1269
1270 /* Determine if the decl for SYMNODE should have binding_clusters
1271 in our state objects; return false to optimize away tracking
1272 certain decls in our state objects, as an optimization. */
1273
1274 static bool
1275 symnode_requires_tracking_p (symtab_node *symnode)
1276 {
1277 gcc_assert (symnode);
1278 if (symnode->externally_visible)
1279 return true;
1280 tree context_fndecl = DECL_CONTEXT (symnode->decl);
1281 if (context_fndecl == NULL)
1282 return true;
1283 if (TREE_CODE (context_fndecl) != FUNCTION_DECL)
1284 return true;
1285 for (auto ref : symnode->ref_list.referring)
1286 if (ipa_ref_requires_tracking (ref))
1287 return true;
1288
1289 /* If we get here, then we don't have uses of this decl that require
1290 tracking; we never read from it or write to it explicitly. */
1291 return false;
1292 }
1293
1294 /* Subroutine of decl_region ctor: determine whether this decl_region
1295 can have binding_clusters; return false to optimize away tracking
1296 of certain decls in our state objects, as an optimization. */
1297
1298 bool
1299 decl_region::calc_tracked_p (tree decl)
1300 {
1301 /* Precondition of symtab_node::get. */
1302 if (TREE_CODE (decl) == VAR_DECL
1303 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) || in_lto_p))
1304 if (symtab_node *symnode = symtab_node::get (decl))
1305 return symnode_requires_tracking_p (symnode);
1306 return true;
1307 }
1308
1309 /* class field_region : public region. */
1310
1311 /* Implementation of region::dump_to_pp vfunc for field_region. */
1312
1313 void
1314 field_region::dump_to_pp (pretty_printer *pp, bool simple) const
1315 {
1316 if (simple)
1317 {
1318 get_parent_region ()->dump_to_pp (pp, simple);
1319 pp_string (pp, ".");
1320 pp_printf (pp, "%E", m_field);
1321 }
1322 else
1323 {
1324 pp_string (pp, "field_region(");
1325 get_parent_region ()->dump_to_pp (pp, simple);
1326 pp_string (pp, ", ");
1327 print_quoted_type (pp, get_type ());
1328 pp_printf (pp, ", %qE)", m_field);
1329 }
1330 }
1331
1332 /* Implementation of region::get_relative_concrete_offset vfunc
1333 for field_region. */
1334
1335 bool
1336 field_region::get_relative_concrete_offset (bit_offset_t *out) const
1337 {
1338 /* Compare with e.g. gimple-fold.cc's
1339 fold_nonarray_ctor_reference. */
1340 tree byte_offset = DECL_FIELD_OFFSET (m_field);
1341 if (TREE_CODE (byte_offset) != INTEGER_CST)
1342 return false;
1343 tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
1344 /* Compute bit offset of the field. */
1345 offset_int bitoffset
1346 = (wi::to_offset (field_offset)
1347 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
1348 *out = bitoffset;
1349 return true;
1350 }
1351
1352
1353 /* Implementation of region::get_relative_symbolic_offset vfunc
1354 for field_region.
1355 If known, the returned svalue is equal to the offset converted to bytes and
1356 rounded off. */
1357
1358 const svalue *
1359 field_region::get_relative_symbolic_offset (region_model_manager *mgr) const
1360 {
1361 bit_offset_t out;
1362 if (get_relative_concrete_offset (&out))
1363 {
1364 tree cst_tree
1365 = wide_int_to_tree (integer_type_node, out / BITS_PER_UNIT);
1366 return mgr->get_or_create_constant_svalue (cst_tree);
1367 }
1368 return mgr->get_or_create_unknown_svalue (integer_type_node);
1369 }
1370
/* class element_region : public region.  */

/* Implementation of region::accept vfunc for element_region.
   Visit the base region's fields first, then the index svalue;
   the visiting order is part of the observable behavior, so it
   must be preserved.  */

void
element_region::accept (visitor *v) const
{
  region::accept (v);
  m_index->accept (v);
}
1381
1382 /* Implementation of region::dump_to_pp vfunc for element_region. */
1383
1384 void
1385 element_region::dump_to_pp (pretty_printer *pp, bool simple) const
1386 {
1387 if (simple)
1388 {
1389 //pp_string (pp, "(");
1390 get_parent_region ()->dump_to_pp (pp, simple);
1391 pp_string (pp, "[");
1392 m_index->dump_to_pp (pp, simple);
1393 pp_string (pp, "]");
1394 //pp_string (pp, ")");
1395 }
1396 else
1397 {
1398 pp_string (pp, "element_region(");
1399 get_parent_region ()->dump_to_pp (pp, simple);
1400 pp_string (pp, ", ");
1401 print_quoted_type (pp, get_type ());
1402 pp_string (pp, ", ");
1403 m_index->dump_to_pp (pp, simple);
1404 pp_printf (pp, ")");
1405 }
1406 }
1407
1408 /* Implementation of region::get_relative_concrete_offset vfunc
1409 for element_region. */
1410
1411 bool
1412 element_region::get_relative_concrete_offset (bit_offset_t *out) const
1413 {
1414 if (tree idx_cst = m_index->maybe_get_constant ())
1415 {
1416 gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);
1417
1418 tree elem_type = get_type ();
1419 offset_int element_idx = wi::to_offset (idx_cst);
1420
1421 /* First, use int_size_in_bytes, to reject the case where we
1422 have an incomplete type, or a non-constant value. */
1423 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1424 if (hwi_byte_size > 0)
1425 {
1426 offset_int element_bit_size
1427 = hwi_byte_size << LOG2_BITS_PER_UNIT;
1428 offset_int element_bit_offset
1429 = element_idx * element_bit_size;
1430 *out = element_bit_offset;
1431 return true;
1432 }
1433 }
1434 return false;
1435 }
1436
1437 /* Implementation of region::get_relative_symbolic_offset vfunc
1438 for element_region. */
1439
1440 const svalue *
1441 element_region::get_relative_symbolic_offset (region_model_manager *mgr) const
1442 {
1443 tree elem_type = get_type ();
1444
1445 /* First, use int_size_in_bytes, to reject the case where we
1446 have an incomplete type, or a non-constant value. */
1447 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1448 if (hwi_byte_size > 0)
1449 {
1450 tree byte_size_tree = wide_int_to_tree (integer_type_node,
1451 hwi_byte_size);
1452 const svalue *byte_size_sval
1453 = mgr->get_or_create_constant_svalue (byte_size_tree);
1454 return mgr->get_or_create_binop (integer_type_node, MULT_EXPR,
1455 m_index, byte_size_sval);
1456 }
1457 return mgr->get_or_create_unknown_svalue (integer_type_node);
1458 }
1459
/* class offset_region : public region.  */

/* Implementation of region::accept vfunc for offset_region.
   Visit the base region's fields first, then the byte-offset
   svalue; preserve this ordering, as visitors may observe it.  */

void
offset_region::accept (visitor *v) const
{
  region::accept (v);
  m_byte_offset->accept (v);
}
1470
1471 /* Implementation of region::dump_to_pp vfunc for offset_region. */
1472
1473 void
1474 offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
1475 {
1476 if (simple)
1477 {
1478 //pp_string (pp, "(");
1479 get_parent_region ()->dump_to_pp (pp, simple);
1480 pp_string (pp, "+");
1481 m_byte_offset->dump_to_pp (pp, simple);
1482 //pp_string (pp, ")");
1483 }
1484 else
1485 {
1486 pp_string (pp, "offset_region(");
1487 get_parent_region ()->dump_to_pp (pp, simple);
1488 pp_string (pp, ", ");
1489 print_quoted_type (pp, get_type ());
1490 pp_string (pp, ", ");
1491 m_byte_offset->dump_to_pp (pp, simple);
1492 pp_printf (pp, ")");
1493 }
1494 }
1495
1496 /* Implementation of region::get_relative_concrete_offset vfunc
1497 for offset_region. */
1498
1499 bool
1500 offset_region::get_relative_concrete_offset (bit_offset_t *out) const
1501 {
1502 if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
1503 {
1504 gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1505 /* Use a signed value for the byte offset, to handle
1506 negative offsets. */
1507 HOST_WIDE_INT byte_offset
1508 = wi::to_offset (byte_offset_cst).to_shwi ();
1509 HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
1510 *out = bit_offset;
1511 return true;
1512 }
1513 return false;
1514 }
1515
/* Implementation of region::get_relative_symbolic_offset vfunc
   for offset_region.
   The byte offset relative to the parent is exactly the svalue
   this region was constructed with, so no manager is needed.  */

const svalue *
offset_region::get_relative_symbolic_offset (region_model_manager *mgr
					      ATTRIBUTE_UNUSED) const
{
  return get_byte_offset ();
}
1525
1526 /* Implementation of region::get_byte_size_sval vfunc for offset_region. */
1527
1528 const svalue *
1529 offset_region::get_byte_size_sval (region_model_manager *mgr) const
1530 {
1531 tree offset_cst = get_byte_offset ()->maybe_get_constant ();
1532 byte_size_t byte_size;
1533 /* If the offset points in the middle of the region,
1534 return the remaining bytes. */
1535 if (get_byte_size (&byte_size) && offset_cst)
1536 {
1537 byte_size_t offset = wi::to_offset (offset_cst);
1538 byte_range r (0, byte_size);
1539 if (r.contains_p (offset))
1540 {
1541 tree remaining_byte_size = wide_int_to_tree (size_type_node,
1542 byte_size - offset);
1543 return mgr->get_or_create_constant_svalue (remaining_byte_size);
1544 }
1545 }
1546
1547 return region::get_byte_size_sval (mgr);
1548 }
1549
/* class sized_region : public region.  */

/* Implementation of region::accept vfunc for sized_region.
   Visit the base region's fields first, then the size svalue;
   preserve this ordering, as visitors may observe it.  */

void
sized_region::accept (visitor *v) const
{
  region::accept (v);
  m_byte_size_sval->accept (v);
}
1560
1561 /* Implementation of region::dump_to_pp vfunc for sized_region. */
1562
1563 void
1564 sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
1565 {
1566 if (simple)
1567 {
1568 pp_string (pp, "SIZED_REG(");
1569 get_parent_region ()->dump_to_pp (pp, simple);
1570 pp_string (pp, ", ");
1571 m_byte_size_sval->dump_to_pp (pp, simple);
1572 pp_string (pp, ")");
1573 }
1574 else
1575 {
1576 pp_string (pp, "sized_region(");
1577 get_parent_region ()->dump_to_pp (pp, simple);
1578 pp_string (pp, ", ");
1579 m_byte_size_sval->dump_to_pp (pp, simple);
1580 pp_printf (pp, ")");
1581 }
1582 }
1583
1584 /* Implementation of region::get_byte_size vfunc for sized_region. */
1585
1586 bool
1587 sized_region::get_byte_size (byte_size_t *out) const
1588 {
1589 if (tree cst = m_byte_size_sval->maybe_get_constant ())
1590 {
1591 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1592 *out = tree_to_uhwi (cst);
1593 return true;
1594 }
1595 return false;
1596 }
1597
1598 /* Implementation of region::get_bit_size vfunc for sized_region. */
1599
1600 bool
1601 sized_region::get_bit_size (bit_size_t *out) const
1602 {
1603 byte_size_t byte_size;
1604 if (!get_byte_size (&byte_size))
1605 return false;
1606 *out = byte_size * BITS_PER_UNIT;
1607 return true;
1608 }
1609
/* class cast_region : public region.  */

/* Implementation of region::accept vfunc for cast_region.
   Visit the base region's fields first, then the original region;
   preserve this ordering, as visitors may observe it.  */

void
cast_region::accept (visitor *v) const
{
  region::accept (v);
  m_original_region->accept (v);
}
1620
1621 /* Implementation of region::dump_to_pp vfunc for cast_region. */
1622
1623 void
1624 cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
1625 {
1626 if (simple)
1627 {
1628 pp_string (pp, "CAST_REG(");
1629 print_quoted_type (pp, get_type ());
1630 pp_string (pp, ", ");
1631 m_original_region->dump_to_pp (pp, simple);
1632 pp_string (pp, ")");
1633 }
1634 else
1635 {
1636 pp_string (pp, "cast_region(");
1637 m_original_region->dump_to_pp (pp, simple);
1638 pp_string (pp, ", ");
1639 print_quoted_type (pp, get_type ());
1640 pp_printf (pp, ")");
1641 }
1642 }
1643
1644 /* Implementation of region::get_relative_concrete_offset vfunc
1645 for cast_region. */
1646
1647 bool
1648 cast_region::get_relative_concrete_offset (bit_offset_t *out) const
1649 {
1650 *out = (int) 0;
1651 return true;
1652 }
1653
1654 /* class heap_allocated_region : public region. */
1655
1656 /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */
1657
1658 void
1659 heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
1660 {
1661 if (simple)
1662 pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
1663 else
1664 pp_printf (pp, "heap_allocated_region(%i)", get_id ());
1665 }
1666
1667 /* class alloca_region : public region. */
1668
1669 /* Implementation of region::dump_to_pp vfunc for alloca_region. */
1670
1671 void
1672 alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
1673 {
1674 if (simple)
1675 pp_printf (pp, "ALLOCA_REGION(%i)", get_id ());
1676 else
1677 pp_printf (pp, "alloca_region(%i)", get_id ());
1678 }
1679
1680 /* class string_region : public region. */
1681
1682 /* Implementation of region::dump_to_pp vfunc for string_region. */
1683
1684 void
1685 string_region::dump_to_pp (pretty_printer *pp, bool simple) const
1686 {
1687 if (simple)
1688 dump_tree (pp, m_string_cst);
1689 else
1690 {
1691 pp_string (pp, "string_region(");
1692 dump_tree (pp, m_string_cst);
1693 if (!flag_dump_noaddr)
1694 {
1695 pp_string (pp, " (");
1696 pp_pointer (pp, m_string_cst);
1697 pp_string (pp, "))");
1698 }
1699 }
1700 }
1701
1702 /* class bit_range_region : public region. */
1703
1704 /* Implementation of region::dump_to_pp vfunc for bit_range_region. */
1705
1706 void
1707 bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
1708 {
1709 if (simple)
1710 {
1711 pp_string (pp, "BIT_RANGE_REG(");
1712 get_parent_region ()->dump_to_pp (pp, simple);
1713 pp_string (pp, ", ");
1714 m_bits.dump_to_pp (pp);
1715 pp_string (pp, ")");
1716 }
1717 else
1718 {
1719 pp_string (pp, "bit_range_region(");
1720 get_parent_region ()->dump_to_pp (pp, simple);
1721 pp_string (pp, ", ");
1722 m_bits.dump_to_pp (pp);
1723 pp_printf (pp, ")");
1724 }
1725 }
1726
1727 /* Implementation of region::get_byte_size vfunc for bit_range_region. */
1728
1729 bool
1730 bit_range_region::get_byte_size (byte_size_t *out) const
1731 {
1732 if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
1733 {
1734 *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
1735 return true;
1736 }
1737 return false;
1738 }
1739
/* Implementation of region::get_bit_size vfunc for bit_range_region.
   The bit size is stored directly in the bit_range, so this always
   succeeds.  */

bool
bit_range_region::get_bit_size (bit_size_t *out) const
{
  *out = m_bits.m_size_in_bits;
  return true;
}
1748
1749 /* Implementation of region::get_byte_size_sval vfunc for bit_range_region. */
1750
1751 const svalue *
1752 bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
1753 {
1754 if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
1755 return mgr->get_or_create_unknown_svalue (size_type_node);
1756
1757 HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
1758 return mgr->get_or_create_int_cst (size_type_node, num_bytes);
1759 }
1760
/* Implementation of region::get_relative_concrete_offset vfunc for
   bit_range_region.
   The start of the bit range is known exactly, so this always
   succeeds.  */

bool
bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
{
  *out = m_bits.get_start_bit_offset ();
  return true;
}
1770
1771 /* Implementation of region::get_relative_symbolic_offset vfunc for
1772 bit_range_region.
1773 The returned svalue is equal to the offset converted to bytes and
1774 rounded off. */
1775
1776 const svalue *
1777 bit_range_region::get_relative_symbolic_offset (region_model_manager *mgr)
1778 const
1779 {
1780 byte_offset_t start_byte = m_bits.get_start_bit_offset () / BITS_PER_UNIT;
1781 tree start_bit_tree = wide_int_to_tree (integer_type_node, start_byte);
1782 return mgr->get_or_create_constant_svalue (start_bit_tree);
1783 }
1784
1785 /* class var_arg_region : public region. */
1786
1787 void
1788 var_arg_region::dump_to_pp (pretty_printer *pp, bool simple) const
1789 {
1790 if (simple)
1791 {
1792 pp_string (pp, "VAR_ARG_REG(");
1793 get_parent_region ()->dump_to_pp (pp, simple);
1794 pp_printf (pp, ", arg_idx: %d)", m_idx);
1795 }
1796 else
1797 {
1798 pp_string (pp, "var_arg_region(");
1799 get_parent_region ()->dump_to_pp (pp, simple);
1800 pp_printf (pp, ", arg_idx: %d)", m_idx);
1801 }
1802 }
1803
/* Get the frame_region for this var_arg_region.
   The parent of a var_arg_region is always a frame_region (enforced
   by the checked as_a cast below).  */

const frame_region *
var_arg_region::get_frame_region () const
{
  gcc_assert (get_parent_region ());
  return as_a <const frame_region *> (get_parent_region ());
}
1812
/* class unknown_region : public region.  */

/* Implementation of region::dump_to_pp vfunc for unknown_region.
   There is no structure to show, so both the simple and verbose
   forms print the same fixed string.  */

void
unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
{
  pp_string (pp, "UNKNOWN_REGION");
}
1822
1823 } // namespace ana
1824
1825 #endif /* #if ENABLE_ANALYZER */