gcc/analyzer/region.cc
1 /* Regions of memory.
2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #define INCLUDE_MEMORY
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tree.h"
26 #include "diagnostic-core.h"
27 #include "gimple-pretty-print.h"
28 #include "function.h"
29 #include "basic-block.h"
30 #include "gimple.h"
31 #include "gimple-iterator.h"
32 #include "diagnostic-core.h"
33 #include "graphviz.h"
34 #include "options.h"
35 #include "cgraph.h"
36 #include "tree-dfa.h"
37 #include "stringpool.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "fold-const.h"
41 #include "tree-pretty-print.h"
42 #include "diagnostic-color.h"
43 #include "diagnostic-metadata.h"
44 #include "bitmap.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "digraph.h"
52 #include "analyzer/supergraph.h"
53 #include "sbitmap.h"
54 #include "analyzer/call-string.h"
55 #include "analyzer/program-point.h"
56 #include "analyzer/store.h"
57 #include "analyzer/region.h"
58 #include "analyzer/region-model.h"
59 #include "analyzer/sm.h"
60 #include "analyzer/program-state.h"
61
62 #if ENABLE_ANALYZER
63
64 namespace ana {
65
66 /* class region and its various subclasses. */
67
68 /* class region. */
69
70 region::~region ()
71 {
72 delete m_cached_offset;
73 }
74
75 /* Compare REG1 and REG2 by id. */
76
77 int
78 region::cmp_ids (const region *reg1, const region *reg2)
79 {
80 return (long)reg1->get_id () - (long)reg2->get_id ();
81 }
82
83 /* Determine the base region for this region: when considering bindings
84 for this region, the base region is the ancestor which identifies
85 which cluster they should be partitioned into.
86 Regions within the same struct/union/array are in the same cluster.
87 Different decls are in different clusters. */
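/* For example (illustrative), given
     struct coord { int x; int y; } c;
   the field_region for "c.y" has the decl_region for "c" as its base
   region, since fields of the same decl share one binding cluster,
   whereas an element_region within a heap_allocated_region has that
   heap_allocated_region as its base.  */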
88
89 const region *
90 region::get_base_region () const
91 {
92 const region *iter = this;
93 while (iter)
94 {
95 switch (iter->get_kind ())
96 {
97 case RK_FIELD:
98 case RK_ELEMENT:
99 case RK_OFFSET:
100 case RK_SIZED:
101 case RK_BIT_RANGE:
102 iter = iter->get_parent_region ();
103 continue;
104 case RK_CAST:
105 iter = iter->dyn_cast_cast_region ()->get_original_region ();
106 continue;
107 default:
108 return iter;
109 }
110 }
111 return iter;
112 }
113
114 /* Return true if get_base_region() == this for this region. */
115
116 bool
117 region::base_region_p () const
118 {
119 switch (get_kind ())
120 {
121 /* Region kinds representing a descendent of a base region. */
122 case RK_FIELD:
123 case RK_ELEMENT:
124 case RK_OFFSET:
125 case RK_SIZED:
126 case RK_CAST:
127 case RK_BIT_RANGE:
128 return false;
129
130 default:
131 return true;
132 }
133 }
134
135 /* Return true if this region is ELDER or one of its descendents. */
136
137 bool
138 region::descendent_of_p (const region *elder) const
139 {
140 const region *iter = this;
141 while (iter)
142 {
143 if (iter == elder)
144 return true;
145 if (iter->get_kind () == RK_CAST)
146 iter = iter->dyn_cast_cast_region ()->get_original_region ();
147 else
148 iter = iter->get_parent_region ();
149 }
150 return false;
151 }
152
153 /* If this region is a frame_region, or a descendent of one, return it.
154 Otherwise return NULL. */
155
156 const frame_region *
157 region::maybe_get_frame_region () const
158 {
159 const region *iter = this;
160 while (iter)
161 {
162 if (const frame_region *frame_reg = iter->dyn_cast_frame_region ())
163 return frame_reg;
164 if (iter->get_kind () == RK_CAST)
165 iter = iter->dyn_cast_cast_region ()->get_original_region ();
166 else
167 iter = iter->get_parent_region ();
168 }
169 return NULL;
170 }
171
172 /* Get the memory space of this region. */
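/* For example, a decl_region for a local variable (a child of a
   frame_region) is in MEMSPACE_STACK, a decl_region for a global is in
   MEMSPACE_GLOBALS, and a string_region is in MEMSPACE_READONLY_DATA.  */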
173
174 enum memory_space
175 region::get_memory_space () const
176 {
177 const region *iter = this;
178 while (iter)
179 {
180 switch (iter->get_kind ())
181 {
182 default:
183 break;
184 case RK_GLOBALS:
185 return MEMSPACE_GLOBALS;
186 case RK_CODE:
187 case RK_FUNCTION:
188 case RK_LABEL:
189 return MEMSPACE_CODE;
190 case RK_FRAME:
191 case RK_STACK:
192 case RK_ALLOCA:
193 return MEMSPACE_STACK;
194 case RK_HEAP:
195 case RK_HEAP_ALLOCATED:
196 return MEMSPACE_HEAP;
197 case RK_STRING:
198 return MEMSPACE_READONLY_DATA;
199 }
200 if (iter->get_kind () == RK_CAST)
201 iter = iter->dyn_cast_cast_region ()->get_original_region ();
202 else
203 iter = iter->get_parent_region ();
204 }
205 return MEMSPACE_UNKNOWN;
206 }
207
208 /* Subroutine for use by region_model_manager::get_or_create_initial_value.
209 Return true if this region has an initial_svalue.
210 Return false if attempting to use INIT_VAL(this_region) should give
211 the "UNINITIALIZED" poison value. */
212
213 bool
214 region::can_have_initial_svalue_p () const
215 {
216 const region *base_reg = get_base_region ();
217
218 /* Check for memory spaces that are uninitialized by default. */
219 enum memory_space mem_space = base_reg->get_memory_space ();
220 switch (mem_space)
221 {
222 default:
223 gcc_unreachable ();
224 case MEMSPACE_UNKNOWN:
225 case MEMSPACE_CODE:
226 case MEMSPACE_GLOBALS:
227 case MEMSPACE_READONLY_DATA:
228 /* Such regions have initial_svalues. */
229 return true;
230
231 case MEMSPACE_HEAP:
232 /* Heap allocations are uninitialized by default. */
233 return false;
234
235 case MEMSPACE_STACK:
236 if (tree decl = base_reg->maybe_get_decl ())
237 {
238 /* See the assertion in frame_region::get_region_for_local for the
239 tree codes we need to handle here. */
240 switch (TREE_CODE (decl))
241 {
242 default:
243 gcc_unreachable ();
244
245 case PARM_DECL:
246 /* Parameters have initial values. */
247 return true;
248
249 case VAR_DECL:
250 case RESULT_DECL:
251 /* Function locals don't have initial values. */
252 return false;
253
254 case SSA_NAME:
255 {
256 tree ssa_name = decl;
257 /* SSA names that are the default defn of a PARM_DECL
258 have initial_svalues; other SSA names don't. */
259 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
260 && SSA_NAME_VAR (ssa_name)
261 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
262 return true;
263 else
264 return false;
265 }
266 }
267 }
268
269 /* If we have an on-stack region that isn't associated with a decl
270 or SSA name, then we have VLA/alloca, which is uninitialized. */
271 return false;
272 }
273 }
274
275 /* If this region is a decl_region, return the decl.
276 Otherwise return NULL. */
277
278 tree
279 region::maybe_get_decl () const
280 {
281 if (const decl_region *decl_reg = dyn_cast_decl_region ())
282 return decl_reg->get_decl ();
283 return NULL_TREE;
284 }
285
286 /* Get the region_offset for this region (calculating it on the
287 first call and caching it internally). */
288
289 region_offset
290 region::get_offset (region_model_manager *mgr) const
291 {
292 if (!m_cached_offset)
293 m_cached_offset = new region_offset (calc_offset (mgr));
294 return *m_cached_offset;
295 }
296
297 /* Base class implementation of region::get_byte_size vfunc.
298 If the size of this region (in bytes) is known statically, write it to *OUT
299 and return true.
300 Otherwise return false. */
301
302 bool
303 region::get_byte_size (byte_size_t *out) const
304 {
305 tree type = get_type ();
306
307 /* Bail out e.g. for heap-allocated regions. */
308 if (!type)
309 return false;
310
311 HOST_WIDE_INT bytes = int_size_in_bytes (type);
312 if (bytes == -1)
313 return false;
314 *out = bytes;
315 return true;
316 }
317
318 /* Base implementation of region::get_byte_size_sval vfunc. */
319
320 const svalue *
321 region::get_byte_size_sval (region_model_manager *mgr) const
322 {
323 tree type = get_type ();
324
325 /* Bail out e.g. for heap-allocated regions. */
326 if (!type)
327 return mgr->get_or_create_unknown_svalue (size_type_node);
328
329 HOST_WIDE_INT bytes = int_size_in_bytes (type);
330 if (bytes == -1)
331 return mgr->get_or_create_unknown_svalue (size_type_node);
332
333 tree byte_size = size_in_bytes (type);
334 if (TREE_TYPE (byte_size) != size_type_node)
335 byte_size = fold_build1 (NOP_EXPR, size_type_node, byte_size);
336 return mgr->get_or_create_constant_svalue (byte_size);
337 }
338
339 /* Attempt to get the size of TYPE in bits.
340 If successful, return true and write the size to *OUT.
341 Otherwise return false. */
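/* For example, on typical targets a 32-bit "int" gives 32 and
   "char[10]" gives 80, whereas an incomplete type such as
   "struct opaque" has no TYPE_SIZE, so the function returns false.  */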
342
343 bool
344 int_size_in_bits (const_tree type, bit_size_t *out)
345 {
346 if (INTEGRAL_TYPE_P (type))
347 {
348 *out = TYPE_PRECISION (type);
349 return true;
350 }
351
352 tree sz = TYPE_SIZE (type);
353 if (sz && tree_fits_uhwi_p (sz))
354 {
355 *out = TREE_INT_CST_LOW (sz);
356 return true;
357 }
358 else
359 return false;
360 }
361
362 /* If the size of this region (in bits) is known statically, write it to *OUT
363 and return true.
364 Otherwise return false. */
365
366 bool
367 region::get_bit_size (bit_size_t *out) const
368 {
369 tree type = get_type ();
370
371 /* Bail out e.g. for heap-allocated regions. */
372 if (!type)
373 return false;
374
375 return int_size_in_bits (type, out);
376 }
377
378 /* Get the field within RECORD_TYPE at BIT_OFFSET. */
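/* For example, assuming 8-bit chars and a 32-bit, 32-bit-aligned "int",
   given
     struct s { char a; char b; int c; };
   a BIT_OFFSET of 8 gives the FIELD_DECL for "b", a BIT_OFFSET of 16
   (within the padding after "b") also gives "b", and a BIT_OFFSET of 32
   gives the FIELD_DECL for "c".  */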
379
380 tree
381 get_field_at_bit_offset (tree record_type, bit_offset_t bit_offset)
382 {
383 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
384 if (bit_offset < 0)
385 return NULL;
386
387 /* Find the first field that has an offset > BIT_OFFSET,
388 then return the one preceding it.
389 Skip other trees within the chain, such as FUNCTION_DECLs. */
390 tree last_field = NULL_TREE;
391 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
392 iter = DECL_CHAIN (iter))
393 {
394 if (TREE_CODE (iter) == FIELD_DECL)
395 {
396 int iter_field_offset = int_bit_position (iter);
397 if (bit_offset < iter_field_offset)
398 return last_field;
399 last_field = iter;
400 }
401 }
402 return last_field;
403 }
404
405 /* Populate *OUT with descendent regions of type TYPE that match
406 RELATIVE_BIT_OFFSET and SIZE_IN_BITS within this region. */
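/* For example (assuming 32-bit "int"), for a region of type
     struct coord { int x; int y; };
   a query with RELATIVE_BIT_OFFSET 32, SIZE_IN_BITS 32 and TYPE "int"
   goes through the RECORD_TYPE case below and pushes the field_region
   for "y" onto *OUT.  */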
407
408 void
409 region::get_subregions_for_binding (region_model_manager *mgr,
410 bit_offset_t relative_bit_offset,
411 bit_size_t size_in_bits,
412 tree type,
413 auto_vec <const region *> *out) const
414 {
415 if (get_type () == NULL_TREE || type == NULL_TREE)
416 return;
417 if (relative_bit_offset == 0
418 && types_compatible_p (get_type (), type))
419 {
420 out->safe_push (this);
421 return;
422 }
423 switch (TREE_CODE (get_type ()))
424 {
425 case ARRAY_TYPE:
426 {
427 tree element_type = TREE_TYPE (get_type ());
428 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (element_type);
429 if (hwi_byte_size > 0)
430 {
431 HOST_WIDE_INT bits_per_element
432 = hwi_byte_size << LOG2_BITS_PER_UNIT;
433 HOST_WIDE_INT element_index
434 = (relative_bit_offset.to_shwi () / bits_per_element);
435 tree element_index_cst
436 = build_int_cst (integer_type_node, element_index);
437 HOST_WIDE_INT inner_bit_offset
438 = relative_bit_offset.to_shwi () % bits_per_element;
439 const region *subregion = mgr->get_element_region
440 (this, element_type,
441 mgr->get_or_create_constant_svalue (element_index_cst));
442 subregion->get_subregions_for_binding (mgr, inner_bit_offset,
443 size_in_bits, type, out);
444 }
445 }
446 break;
447 case RECORD_TYPE:
448 {
449 /* The bit offset might be *within* one of the fields (such as
450 with nested structs).
451 So we want to find the enclosing field, adjust the offset,
452 and repeat. */
453 if (tree field = get_field_at_bit_offset (get_type (),
454 relative_bit_offset))
455 {
456 int field_bit_offset = int_bit_position (field);
457 const region *subregion = mgr->get_field_region (this, field);
458 subregion->get_subregions_for_binding
459 (mgr, relative_bit_offset - field_bit_offset,
460 size_in_bits, type, out);
461 }
462 }
463 break;
464 case UNION_TYPE:
465 {
466 for (tree field = TYPE_FIELDS (get_type ()); field != NULL_TREE;
467 field = DECL_CHAIN (field))
468 {
469 if (TREE_CODE (field) != FIELD_DECL)
470 continue;
471 const region *subregion = mgr->get_field_region (this, field);
472 subregion->get_subregions_for_binding (mgr,
473 relative_bit_offset,
474 size_in_bits,
475 type,
476 out);
477 }
478 }
479 break;
480 default:
481 /* Do nothing. */
482 break;
483 }
484 }
485
486 /* Walk from this region up to the base region within its cluster, calculating
487 the offset relative to the base region, either as an offset in bits,
488 or a symbolic offset. */
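/* For example, given
     int arr[10];
   the element_region for "arr[3]" has a concrete offset of 96 bits
   relative to the decl_region for "arr" (assuming 32-bit "int"),
   whereas "arr[i]" with a symbolic index yields a symbolic offset.  */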
489
490 region_offset
491 region::calc_offset (region_model_manager *mgr) const
492 {
493 const region *iter_region = this;
494 bit_offset_t accum_bit_offset = 0;
495 const svalue *accum_byte_sval = NULL;
496
497 while (iter_region)
498 {
499 switch (iter_region->get_kind ())
500 {
501 case RK_FIELD:
502 case RK_ELEMENT:
503 case RK_OFFSET:
504 case RK_BIT_RANGE:
505 if (accum_byte_sval)
506 {
507 const svalue *sval
508 = iter_region->get_relative_symbolic_offset (mgr);
509 accum_byte_sval
510 = mgr->get_or_create_binop (sval->get_type (), PLUS_EXPR,
511 accum_byte_sval, sval);
512 iter_region = iter_region->get_parent_region ();
513 }
514 else
515 {
516 bit_offset_t rel_bit_offset;
517 if (iter_region->get_relative_concrete_offset (&rel_bit_offset))
518 {
519 accum_bit_offset += rel_bit_offset;
520 iter_region = iter_region->get_parent_region ();
521 }
522 else
523 {
524 /* If the iter_region is no longer concrete, convert the
525 accumulated bits to an svalue in bytes and revisit the
526 iter_region, this time collecting the symbolic offset. */
527 byte_offset_t byte_offset = accum_bit_offset / BITS_PER_UNIT;
528 tree offset_tree = wide_int_to_tree (integer_type_node,
529 byte_offset);
530 accum_byte_sval
531 = mgr->get_or_create_constant_svalue (offset_tree);
532 }
533 }
534 continue;
535 case RK_SIZED:
536 iter_region = iter_region->get_parent_region ();
537 continue;
538
539 case RK_CAST:
540 {
541 const cast_region *cast_reg
542 = as_a <const cast_region *> (iter_region);
543 iter_region = cast_reg->get_original_region ();
544 }
545 continue;
546
547 default:
548 return accum_byte_sval
549 ? region_offset::make_symbolic (iter_region,
550 accum_byte_sval)
551 : region_offset::make_concrete (iter_region,
552 accum_bit_offset);
553 }
554 }
555
556 return accum_byte_sval ? region_offset::make_symbolic (iter_region,
557 accum_byte_sval)
558 : region_offset::make_concrete (iter_region,
559 accum_bit_offset);
560 }
561
562 /* Base implementation of region::get_relative_concrete_offset vfunc. */
563
564 bool
565 region::get_relative_concrete_offset (bit_offset_t *) const
566 {
567 return false;
568 }
569
570 /* Base implementation of region::get_relative_symbolic_offset vfunc. */
571
572 const svalue *
573 region::get_relative_symbolic_offset (region_model_manager *mgr) const
574 {
575 return mgr->get_or_create_unknown_svalue (integer_type_node);
576 }
577
578 /* Attempt to get the position and size of this region expressed as a
579 concrete range of bytes relative to its parent.
580 If successful, return true and write to *OUT.
581 Otherwise return false. */
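/* For example (assuming 32-bit "int"), the field_region for "y" within
     struct coord { int x; int y; };
   has a concrete byte range of start byte 4, size 4 bytes (i.e. bytes
   4-7) relative to its parent.  */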
582
583 bool
584 region::get_relative_concrete_byte_range (byte_range *out) const
585 {
586 /* We must have a concrete offset relative to the parent. */
587 bit_offset_t rel_bit_offset;
588 if (!get_relative_concrete_offset (&rel_bit_offset))
589 return false;
590 /* ...which must be a whole number of bytes. */
591 if (rel_bit_offset % BITS_PER_UNIT != 0)
592 return false;
593 byte_offset_t start_byte_offset = rel_bit_offset / BITS_PER_UNIT;
594
595 /* We must have a concrete size, which must be a whole number
596 of bytes. */
597 byte_size_t num_bytes;
598 if (!get_byte_size (&num_bytes))
599 return false;
600
601 /* Success. */
602 *out = byte_range (start_byte_offset, num_bytes);
603 return true;
604 }
605
606 /* Dump a description of this region to stderr. */
607
608 DEBUG_FUNCTION void
609 region::dump (bool simple) const
610 {
611 pretty_printer pp;
612 pp_format_decoder (&pp) = default_tree_printer;
613 pp_show_color (&pp) = pp_show_color (global_dc->printer);
614 pp.buffer->stream = stderr;
615 dump_to_pp (&pp, simple);
616 pp_newline (&pp);
617 pp_flush (&pp);
618 }
619
620 /* Return a new json::string describing the region. */
621
622 json::value *
623 region::to_json () const
624 {
625 label_text desc = get_desc (true);
626 json::value *reg_js = new json::string (desc.get ());
627 return reg_js;
628 }
629
630 /* Generate a description of this region. */
631
632 DEBUG_FUNCTION label_text
633 region::get_desc (bool simple) const
634 {
635 pretty_printer pp;
636 pp_format_decoder (&pp) = default_tree_printer;
637 dump_to_pp (&pp, simple);
638 return label_text::take (xstrdup (pp_formatted_text (&pp)));
639 }
640
641 /* Base implementation of region::accept vfunc.
642 Subclass implementations should chain up to this. */
643
644 void
645 region::accept (visitor *v) const
646 {
647 v->visit_region (this);
648 if (m_parent)
649 m_parent->accept (v);
650 }
651
652 /* Return true if this is a symbolic region for dereferencing an
653 unknown ptr.
654 We shouldn't attempt to bind values for this region (but
655 can unbind values for other regions). */
656
657 bool
658 region::symbolic_for_unknown_ptr_p () const
659 {
660 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
661 if (sym_reg->get_pointer ()->get_kind () == SK_UNKNOWN)
662 return true;
663 return false;
664 }
665
666 /* Return true if this is a symbolic region. */
667
668 bool
669 region::symbolic_p () const
670 {
671 return get_kind () == RK_SYMBOLIC;
672 }
673
674 /* Return true if this is a region for a decl with name DECL_NAME.
675 Intended for use when debugging (for assertions and conditional
676 breakpoints). */
677
678 DEBUG_FUNCTION bool
679 region::is_named_decl_p (const char *decl_name) const
680 {
681 if (tree decl = maybe_get_decl ())
682 if (DECL_NAME (decl)
683 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (decl)), decl_name))
684 return true;
685 return false;
686 }
687
688 /* region's ctor. */
689
690 region::region (complexity c, unsigned id, const region *parent, tree type)
691 : m_complexity (c), m_id (id), m_parent (parent), m_type (type),
692 m_cached_offset (NULL)
693 {
694 gcc_assert (type == NULL_TREE || TYPE_P (type));
695 }
696
697 /* Comparator for use by vec<const region *>::qsort,
698 using their IDs to order them. */
699
700 int
701 region::cmp_ptr_ptr (const void *p1, const void *p2)
702 {
703 const region * const *reg1 = (const region * const *)p1;
704 const region * const *reg2 = (const region * const *)p2;
705
706 return cmp_ids (*reg1, *reg2);
707 }
708
709 /* Determine if a pointer to this region must be non-NULL.
710
711 Generally, pointers to regions must be non-NULL, but pointers
712 to symbolic_regions might, in fact, be NULL.
713
714 This allows us to simulate functions like malloc and calloc with:
715 - only one "outcome" from each statement,
716 - the idea that the pointer is on the heap if non-NULL
717 - the possibility that the pointer could be NULL
718 - the idea that successive values returned from malloc are non-equal
719 - to be able to zero-fill for calloc. */
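/* For example, given
     void *p = malloc (n);
   "p" points to a heap_allocated_region, for which non_null_p returns
   false: whether the allocation succeeded is tracked via constraints on
   the pointer value rather than being assumed.  */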
720
721 bool
722 region::non_null_p () const
723 {
724 switch (get_kind ())
725 {
726 default:
727 return true;
728 case RK_SYMBOLIC:
729 /* Are we within a symbolic_region? If so, it could be NULL, and we
730 have to fall back on the constraints. */
731 return false;
732 case RK_HEAP_ALLOCATED:
733 return false;
734 }
735 }
736
737 /* Return true iff this region is defined in terms of SVAL. */
738
739 bool
740 region::involves_p (const svalue *sval) const
741 {
742 if (const symbolic_region *symbolic_reg = dyn_cast_symbolic_region ())
743 {
744 if (symbolic_reg->get_pointer ()->involves_p (sval))
745 return true;
746 }
747
748 return false;
749 }
750
751 /* Comparator for trees to impose a deterministic ordering on
752 T1 and T2. */
753
754 static int
755 tree_cmp (const_tree t1, const_tree t2)
756 {
757 gcc_assert (t1);
758 gcc_assert (t2);
759
760 /* Test tree codes first. */
761 if (TREE_CODE (t1) != TREE_CODE (t2))
762 return TREE_CODE (t1) - TREE_CODE (t2);
763
764 /* From this point on, we know T1 and T2 have the same tree code. */
765
766 if (DECL_P (t1))
767 {
768 if (DECL_NAME (t1) && DECL_NAME (t2))
769 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
770 IDENTIFIER_POINTER (DECL_NAME (t2)));
771 else
772 {
773 if (DECL_NAME (t1))
774 return -1;
775 else if (DECL_NAME (t2))
776 return 1;
777 else
778 return DECL_UID (t1) - DECL_UID (t2);
779 }
780 }
781
782 switch (TREE_CODE (t1))
783 {
784 case SSA_NAME:
785 {
786 if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
787 {
788 int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
789 if (var_cmp)
790 return var_cmp;
791 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
792 }
793 else
794 {
795 if (SSA_NAME_VAR (t1))
796 return -1;
797 else if (SSA_NAME_VAR (t2))
798 return 1;
799 else
800 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
801 }
802 }
803 break;
804
805 case INTEGER_CST:
806 return tree_int_cst_compare (t1, t2);
807
808 case REAL_CST:
809 {
810 const real_value *rv1 = TREE_REAL_CST_PTR (t1);
811 const real_value *rv2 = TREE_REAL_CST_PTR (t2);
812 if (real_compare (UNORDERED_EXPR, rv1, rv2))
813 {
814 /* Impose an arbitrary order on NaNs relative to other NaNs
815 and to non-NaNs. */
816 if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
817 return cmp_isnan;
818 if (int cmp_issignaling_nan
819 = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
820 return cmp_issignaling_nan;
821 return real_isneg (rv1) - real_isneg (rv2);
822 }
823 if (real_compare (LT_EXPR, rv1, rv2))
824 return -1;
825 if (real_compare (GT_EXPR, rv1, rv2))
826 return 1;
827 return 0;
828 }
829
830 case STRING_CST:
831 return strcmp (TREE_STRING_POINTER (t1),
832 TREE_STRING_POINTER (t2));
833
834 default:
835 gcc_unreachable ();
836 break;
837 }
838
839 gcc_unreachable ();
840
841 return 0;
842 }
843
844 /* qsort comparator for trees to impose a deterministic ordering on
845 P1 and P2. */
846
847 int
848 tree_cmp (const void *p1, const void *p2)
849 {
850 const_tree t1 = *(const_tree const *)p1;
851 const_tree t2 = *(const_tree const *)p2;
852
853 return tree_cmp (t1, t2);
854 }
855
856 /* class frame_region : public space_region. */
857
858 frame_region::~frame_region ()
859 {
860 for (map_t::iterator iter = m_locals.begin ();
861 iter != m_locals.end ();
862 ++iter)
863 delete (*iter).second;
864 }
865
866 void
867 frame_region::accept (visitor *v) const
868 {
869 region::accept (v);
870 if (m_calling_frame)
871 m_calling_frame->accept (v);
872 }
873
874 /* Implementation of region::dump_to_pp vfunc for frame_region. */
875
876 void
877 frame_region::dump_to_pp (pretty_printer *pp, bool simple) const
878 {
879 if (simple)
880 pp_printf (pp, "frame: %qs@%i", function_name (m_fun), get_stack_depth ());
881 else
882 pp_printf (pp, "frame_region(%qs, index: %i, depth: %i)",
883 function_name (m_fun), m_index, get_stack_depth ());
884 }
885
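/* Get the decl_region for EXPR (a PARM_DECL, VAR_DECL, RESULT_DECL,
   or SSA name) within this frame, creating and caching it on first
   use.  */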
886 const decl_region *
887 frame_region::get_region_for_local (region_model_manager *mgr,
888 tree expr,
889 const region_model_context *ctxt) const
890 {
891 if (CHECKING_P)
892 {
893 /* Verify that EXPR is a local or SSA name, and that it's for the
894 correct function for this stack frame. */
895 gcc_assert (TREE_CODE (expr) == PARM_DECL
896 || TREE_CODE (expr) == VAR_DECL
897 || TREE_CODE (expr) == SSA_NAME
898 || TREE_CODE (expr) == RESULT_DECL);
899 switch (TREE_CODE (expr))
900 {
901 default:
902 gcc_unreachable ();
903 case VAR_DECL:
904 gcc_assert (!is_global_var (expr));
905 /* Fall through. */
906 case PARM_DECL:
907 case RESULT_DECL:
908 gcc_assert (DECL_CONTEXT (expr) == m_fun->decl);
909 break;
910 case SSA_NAME:
911 {
912 if (tree var = SSA_NAME_VAR (expr))
913 {
914 if (DECL_P (var))
915 gcc_assert (DECL_CONTEXT (var) == m_fun->decl);
916 }
917 else if (ctxt)
918 if (const extrinsic_state *ext_state = ctxt->get_ext_state ())
919 if (const supergraph *sg
920 = ext_state->get_engine ()->get_supergraph ())
921 {
922 const gimple *def_stmt = SSA_NAME_DEF_STMT (expr);
923 const supernode *snode
924 = sg->get_supernode_for_stmt (def_stmt);
925 gcc_assert (snode->get_function () == m_fun);
926 }
927 }
928 break;
929 }
930 }
931
932 /* Ideally we'd use mutable here. */
933 map_t &mutable_locals = const_cast <map_t &> (m_locals);
934
935 if (decl_region **slot = mutable_locals.get (expr))
936 return *slot;
937 decl_region *reg
938 = new decl_region (mgr->alloc_region_id (), this, expr);
939 mutable_locals.put (expr, reg);
940 return reg;
941 }
942
943 /* class globals_region : public space_region. */
944
945 /* Implementation of region::dump_to_pp vfunc for globals_region. */
946
947 void
948 globals_region::dump_to_pp (pretty_printer *pp, bool simple) const
949 {
950 if (simple)
951 pp_string (pp, "::");
952 else
953 pp_string (pp, "globals");
954 }
955
956 /* class code_region : public map_region. */
957
958 /* Implementation of region::dump_to_pp vfunc for code_region. */
959
960 void
961 code_region::dump_to_pp (pretty_printer *pp, bool simple) const
962 {
963 if (simple)
964 pp_string (pp, "code region");
965 else
966 pp_string (pp, "code_region()");
967 }
968
969 /* class function_region : public region. */
970
971 /* Implementation of region::dump_to_pp vfunc for function_region. */
972
973 void
974 function_region::dump_to_pp (pretty_printer *pp, bool simple) const
975 {
976 if (simple)
977 {
978 dump_quoted_tree (pp, m_fndecl);
979 }
980 else
981 {
982 pp_string (pp, "function_region(");
983 dump_quoted_tree (pp, m_fndecl);
984 pp_string (pp, ")");
985 }
986 }
987
988 /* class label_region : public region. */
989
990 /* Implementation of region::dump_to_pp vfunc for label_region. */
991
992 void
993 label_region::dump_to_pp (pretty_printer *pp, bool simple) const
994 {
995 if (simple)
996 {
997 dump_quoted_tree (pp, m_label);
998 }
999 else
1000 {
1001 pp_string (pp, "label_region(");
1002 dump_quoted_tree (pp, m_label);
1003 pp_string (pp, ")");
1004 }
1005 }
1006
1007 /* class stack_region : public region. */
1008
1009 /* Implementation of region::dump_to_pp vfunc for stack_region. */
1010
1011 void
1012 stack_region::dump_to_pp (pretty_printer *pp, bool simple) const
1013 {
1014 if (simple)
1015 pp_string (pp, "stack region");
1016 else
1017 pp_string (pp, "stack_region()");
1018 }
1019
1020 /* class heap_region : public region. */
1021
1022 /* Implementation of region::dump_to_pp vfunc for heap_region. */
1023
1024 void
1025 heap_region::dump_to_pp (pretty_printer *pp, bool simple) const
1026 {
1027 if (simple)
1028 pp_string (pp, "heap region");
1029 else
1030 pp_string (pp, "heap_region()");
1031 }
1032
1033 /* class root_region : public region. */
1034
1035 /* root_region's ctor. */
1036
1037 root_region::root_region (unsigned id)
1038 : region (complexity (1, 1), id, NULL, NULL_TREE)
1039 {
1040 }
1041
1042 /* Implementation of region::dump_to_pp vfunc for root_region. */
1043
1044 void
1045 root_region::dump_to_pp (pretty_printer *pp, bool simple) const
1046 {
1047 if (simple)
1048 pp_string (pp, "root region");
1049 else
1050 pp_string (pp, "root_region()");
1051 }
1052
1053 /* class symbolic_region : public map_region. */
1054
1055 /* symbolic_region's ctor. */
1056
1057 symbolic_region::symbolic_region (unsigned id, region *parent,
1058 const svalue *sval_ptr)
1059 : region (complexity::from_pair (parent, sval_ptr), id, parent,
1060 (sval_ptr->get_type ()
1061 ? TREE_TYPE (sval_ptr->get_type ())
1062 : NULL_TREE)),
1063 m_sval_ptr (sval_ptr)
1064 {
1065 }
1066
1067 /* Implementation of region::accept vfunc for symbolic_region. */
1068
1069 void
1070 symbolic_region::accept (visitor *v) const
1071 {
1072 region::accept (v);
1073 m_sval_ptr->accept (v);
1074 }
1075
1076 /* Implementation of region::dump_to_pp vfunc for symbolic_region. */
1077
1078 void
1079 symbolic_region::dump_to_pp (pretty_printer *pp, bool simple) const
1080 {
1081 if (simple)
1082 {
1083 pp_string (pp, "(*");
1084 m_sval_ptr->dump_to_pp (pp, simple);
1085 pp_string (pp, ")");
1086 }
1087 else
1088 {
1089 pp_string (pp, "symbolic_region(");
1090 get_parent_region ()->dump_to_pp (pp, simple);
1091 if (get_type ())
1092 {
1093 pp_string (pp, ", ");
1094 print_quoted_type (pp, get_type ());
1095 }
1096 pp_string (pp, ", ");
1097 m_sval_ptr->dump_to_pp (pp, simple);
1098 pp_string (pp, ")");
1099 }
1100 }
1101
1102 /* class decl_region : public region. */
1103
1104 /* Implementation of region::dump_to_pp vfunc for decl_region. */
1105
1106 void
1107 decl_region::dump_to_pp (pretty_printer *pp, bool simple) const
1108 {
1109 if (simple)
1110 pp_printf (pp, "%E", m_decl);
1111 else
1112 {
1113 pp_string (pp, "decl_region(");
1114 get_parent_region ()->dump_to_pp (pp, simple);
1115 pp_string (pp, ", ");
1116 print_quoted_type (pp, get_type ());
1117 pp_printf (pp, ", %qE)", m_decl);
1118 }
1119 }
1120
1121 /* Get the stack depth for the frame containing this decl, or 0
1122 for a global. */
1123
1124 int
1125 decl_region::get_stack_depth () const
1126 {
1127 if (get_parent_region () == NULL)
1128 return 0;
1129 if (const frame_region *frame_reg
1130 = get_parent_region ()->dyn_cast_frame_region ())
1131 return frame_reg->get_stack_depth ();
1132 return 0;
1133 }
1134
1135 /* If the underlying decl is in the global constant pool,
1136 return an svalue representing the constant value.
1137 Otherwise return NULL. */
1138
1139 const svalue *
1140 decl_region::maybe_get_constant_value (region_model_manager *mgr) const
1141 {
1142 if (TREE_CODE (m_decl) == VAR_DECL
1143 && DECL_IN_CONSTANT_POOL (m_decl)
1144 && DECL_INITIAL (m_decl)
1145 && TREE_CODE (DECL_INITIAL (m_decl)) == CONSTRUCTOR)
1146 return get_svalue_for_constructor (DECL_INITIAL (m_decl), mgr);
1147 return NULL;
1148 }
1149
1150 /* Get an svalue for CTOR, a CONSTRUCTOR for this region's decl. */
1151
1152 const svalue *
1153 decl_region::get_svalue_for_constructor (tree ctor,
1154 region_model_manager *mgr) const
1155 {
1156 gcc_assert (!TREE_CLOBBER_P (ctor));
1157
1158 /* Create a binding map, applying ctor to it, using this
1159 decl_region as the base region when building child regions
1160 for offset calculations. */
1161 binding_map map;
1162 if (!map.apply_ctor_to_region (this, ctor, mgr))
1163 return mgr->get_or_create_unknown_svalue (get_type ());
1164
1165 /* Return a compound svalue for the map we built. */
1166 return mgr->get_or_create_compound_svalue (get_type (), map);
1167 }
1168
1169 /* For use on decl_regions for global variables.
1170
1171 Get an svalue for the initial value of this region at entry to
1172 "main" (either based on DECL_INITIAL, or implicit initialization to
1173 zero).
1174
1175 Return NULL if there is a problem. */
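/* For example,
     int g = 42;
   gives a constant_svalue for 42, whereas
     static int buf[16];
   with no explicit initializer gives a compound_svalue representing a
   zero-filled region.  */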
1176
1177 const svalue *
1178 decl_region::get_svalue_for_initializer (region_model_manager *mgr) const
1179 {
1180 tree init = DECL_INITIAL (m_decl);
1181 if (!init)
1182 {
1183 /* If we have an "extern" decl then there may be an initializer in
1184 another TU. */
1185 if (DECL_EXTERNAL (m_decl))
1186 return NULL;
1187
1188 /* Implicit initialization to zero; use a compound_svalue for it.
1189 Doing so requires that we have a concrete binding for this region,
1190 which can fail if we have a region with unknown size
1191 (e.g. "extern const char arr[];"). */
1192 const binding_key *binding
1193 = binding_key::make (mgr->get_store_manager (), this);
1194 if (binding->symbolic_p ())
1195 return NULL;
1196
1197 /* If we don't care about tracking the content of this region, then
1198 it's unused, and the value doesn't matter. */
1199 if (!tracked_p ())
1200 return NULL;
1201
1202 binding_cluster c (this);
1203 c.zero_fill_region (mgr->get_store_manager (), this);
1204 return mgr->get_or_create_compound_svalue (TREE_TYPE (m_decl),
1205 c.get_map ());
1206 }
1207
1208 /* LTO can write out error_mark_node as the DECL_INITIAL for simple scalar
1209 values (to avoid writing out an extra section). */
1210 if (init == error_mark_node)
1211 return NULL;
1212
1213 if (TREE_CODE (init) == CONSTRUCTOR)
1214 return get_svalue_for_constructor (init, mgr);
1215
1216 /* Reuse the get_rvalue logic from region_model. */
1217 region_model m (mgr);
1218 return m.get_rvalue (path_var (init, 0), NULL);
1219 }
1220
1221 /* Subroutine of symnode_requires_tracking_p; return true if REF
1222 might imply that we should be tracking the value of its decl. */
1223
1224 static bool
1225 ipa_ref_requires_tracking (ipa_ref *ref)
1226 {
1227 /* If we have a load/store/alias of the symbol, then we'll track
1228 the decl's value. */
1229 if (ref->use != IPA_REF_ADDR)
1230 return true;
1231
1232 if (ref->stmt == NULL)
1233 return true;
1234
1235 switch (ref->stmt->code)
1236 {
1237 default:
1238 return true;
1239 case GIMPLE_CALL:
1240 {
1241 cgraph_node *caller_cnode = dyn_cast <cgraph_node *> (ref->referring);
1242 if (caller_cnode == NULL)
1243 return true;
1244 cgraph_edge *edge = caller_cnode->get_edge (ref->stmt);
1245 if (!edge)
1246 return true;
1247 if (edge->callee == NULL)
1248 return true; /* e.g. call through function ptr. */
1249 if (edge->callee->definition)
1250 return true;
1251 /* If we get here, then this ref is a pointer passed to
1252 a function we don't have the definition for. */
1253 return false;
1254 }
1255 break;
1256 case GIMPLE_ASM:
1257 {
1258 const gasm *asm_stmt = as_a <const gasm *> (ref->stmt);
1259 if (gimple_asm_noutputs (asm_stmt) > 0)
1260 return true;
1261 if (gimple_asm_nclobbers (asm_stmt) > 0)
1262 return true;
1263 /* If we get here, then this ref is the decl being passed
1264 by pointer to asm with no outputs. */
1265 return false;
1266 }
1267 break;
1268 }
1269 }
1270
1271 /* Determine if the decl for SYMNODE should have binding_clusters
1272 in our state objects; return false to optimize away tracking
1273 certain decls in our state objects. */
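/* For example, a function-local
     static int dummy;
   whose address is only ever passed to a function that has no
   definition in this TU is never read or written here, and so need not
   be tracked.  */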
1274
1275 static bool
1276 symnode_requires_tracking_p (symtab_node *symnode)
1277 {
1278 gcc_assert (symnode);
1279 if (symnode->externally_visible)
1280 return true;
1281 tree context_fndecl = DECL_CONTEXT (symnode->decl);
1282 if (context_fndecl == NULL)
1283 return true;
1284 if (TREE_CODE (context_fndecl) != FUNCTION_DECL)
1285 return true;
1286 for (auto ref : symnode->ref_list.referring)
1287 if (ipa_ref_requires_tracking (ref))
1288 return true;
1289
1290 /* If we get here, then we don't have uses of this decl that require
1291 tracking; we never read from it or write to it explicitly. */
1292 return false;
1293 }
1294
1295 /* Subroutine of decl_region ctor: determine whether this decl_region
1296 can have binding_clusters; return false to optimize away tracking
1297 of certain decls in our state objects. */
1298
1299 bool
1300 decl_region::calc_tracked_p (tree decl)
1301 {
1302 /* Precondition of symtab_node::get. */
1303 if (TREE_CODE (decl) == VAR_DECL
1304 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) || in_lto_p))
1305 if (symtab_node *symnode = symtab_node::get (decl))
1306 return symnode_requires_tracking_p (symnode);
1307 return true;
1308 }
1309
1310 /* class field_region : public region. */
1311
1312 /* Implementation of region::dump_to_pp vfunc for field_region. */
1313
1314 void
1315 field_region::dump_to_pp (pretty_printer *pp, bool simple) const
1316 {
1317 if (simple)
1318 {
1319 get_parent_region ()->dump_to_pp (pp, simple);
1320 pp_string (pp, ".");
1321 pp_printf (pp, "%E", m_field);
1322 }
1323 else
1324 {
1325 pp_string (pp, "field_region(");
1326 get_parent_region ()->dump_to_pp (pp, simple);
1327 pp_string (pp, ", ");
1328 print_quoted_type (pp, get_type ());
1329 pp_printf (pp, ", %qE)", m_field);
1330 }
1331 }
1332
1333 /* Implementation of region::get_relative_concrete_offset vfunc
1334 for field_region. */
1335
1336 bool
1337 field_region::get_relative_concrete_offset (bit_offset_t *out) const
1338 {
1339 /* Compare with e.g. gimple-fold.cc's
1340 fold_nonarray_ctor_reference. */
1341 tree byte_offset = DECL_FIELD_OFFSET (m_field);
1342 if (TREE_CODE (byte_offset) != INTEGER_CST)
1343 return false;
1344 tree field_offset = DECL_FIELD_BIT_OFFSET (m_field);
1345 /* Compute bit offset of the field. */
1346 offset_int bitoffset
1347 = (wi::to_offset (field_offset)
1348 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
1349 *out = bitoffset;
1350 return true;
1351 }
1352
1353
1354 /* Implementation of region::get_relative_symbolic_offset vfunc
1355 for field_region.
1356 If known, the returned svalue is equal to the offset converted to bytes and
1357 rounded down. */
1358
1359 const svalue *
1360 field_region::get_relative_symbolic_offset (region_model_manager *mgr) const
1361 {
1362 bit_offset_t out;
1363 if (get_relative_concrete_offset (&out))
1364 {
1365 tree cst_tree
1366 = wide_int_to_tree (integer_type_node, out / BITS_PER_UNIT);
1367 return mgr->get_or_create_constant_svalue (cst_tree);
1368 }
1369 return mgr->get_or_create_unknown_svalue (integer_type_node);
1370 }
1371
1372 /* class element_region : public region. */
1373
1374 /* Implementation of region::accept vfunc for element_region. */
1375
1376 void
1377 element_region::accept (visitor *v) const
1378 {
1379 region::accept (v);
1380 m_index->accept (v);
1381 }
1382
1383 /* Implementation of region::dump_to_pp vfunc for element_region. */
1384
1385 void
1386 element_region::dump_to_pp (pretty_printer *pp, bool simple) const
1387 {
1388 if (simple)
1389 {
1390 //pp_string (pp, "(");
1391 get_parent_region ()->dump_to_pp (pp, simple);
1392 pp_string (pp, "[");
1393 m_index->dump_to_pp (pp, simple);
1394 pp_string (pp, "]");
1395 //pp_string (pp, ")");
1396 }
1397 else
1398 {
1399 pp_string (pp, "element_region(");
1400 get_parent_region ()->dump_to_pp (pp, simple);
1401 pp_string (pp, ", ");
1402 print_quoted_type (pp, get_type ());
1403 pp_string (pp, ", ");
1404 m_index->dump_to_pp (pp, simple);
1405 pp_printf (pp, ")");
1406 }
1407 }
1408
1409 /* Implementation of region::get_relative_concrete_offset vfunc
1410 for element_region. */
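/* For example, an element_region with constant index 3 whose element
   type is a 32-bit "int" has a relative concrete offset of
   3 * 32 == 96 bits from its parent.  */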
1411
1412 bool
1413 element_region::get_relative_concrete_offset (bit_offset_t *out) const
1414 {
1415 if (tree idx_cst = m_index->maybe_get_constant ())
1416 {
1417 gcc_assert (TREE_CODE (idx_cst) == INTEGER_CST);
1418
1419 tree elem_type = get_type ();
1420 offset_int element_idx = wi::to_offset (idx_cst);
1421
1422 /* First, use int_size_in_bytes, to reject the case where we
1423 have an incomplete type, or a non-constant value. */
1424 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1425 if (hwi_byte_size > 0)
1426 {
1427 offset_int element_bit_size
1428 = hwi_byte_size << LOG2_BITS_PER_UNIT;
1429 offset_int element_bit_offset
1430 = element_idx * element_bit_size;
1431 *out = element_bit_offset;
1432 return true;
1433 }
1434 }
1435 return false;
1436 }
1437
1438 /* Implementation of region::get_relative_symbolic_offset vfunc
1439 for element_region. */
1440
1441 const svalue *
1442 element_region::get_relative_symbolic_offset (region_model_manager *mgr) const
1443 {
1444 tree elem_type = get_type ();
1445
1446 /* First, use int_size_in_bytes, to reject the case where we
1447 have an incomplete type, or a non-constant value. */
1448 HOST_WIDE_INT hwi_byte_size = int_size_in_bytes (elem_type);
1449 if (hwi_byte_size > 0)
1450 {
1451 tree byte_size_tree = wide_int_to_tree (integer_type_node,
1452 hwi_byte_size);
1453 const svalue *byte_size_sval
1454 = mgr->get_or_create_constant_svalue (byte_size_tree);
1455 return mgr->get_or_create_binop (integer_type_node, MULT_EXPR,
1456 m_index, byte_size_sval);
1457 }
1458 return mgr->get_or_create_unknown_svalue (integer_type_node);
1459 }
1460
1461 /* class offset_region : public region. */
1462
1463 /* Implementation of region::accept vfunc for offset_region. */
1464
1465 void
1466 offset_region::accept (visitor *v) const
1467 {
1468 region::accept (v);
1469 m_byte_offset->accept (v);
1470 }
1471
1472 /* Implementation of region::dump_to_pp vfunc for offset_region. */
1473
1474 void
1475 offset_region::dump_to_pp (pretty_printer *pp, bool simple) const
1476 {
1477 if (simple)
1478 {
1479 //pp_string (pp, "(");
1480 get_parent_region ()->dump_to_pp (pp, simple);
1481 pp_string (pp, "+");
1482 m_byte_offset->dump_to_pp (pp, simple);
1483 //pp_string (pp, ")");
1484 }
1485 else
1486 {
1487 pp_string (pp, "offset_region(");
1488 get_parent_region ()->dump_to_pp (pp, simple);
1489 pp_string (pp, ", ");
1490 print_quoted_type (pp, get_type ());
1491 pp_string (pp, ", ");
1492 m_byte_offset->dump_to_pp (pp, simple);
1493 pp_printf (pp, ")");
1494 }
1495 }
1496
1497 /* Implementation of region::get_relative_concrete_offset vfunc
1498 for offset_region. */
1499
1500 bool
1501 offset_region::get_relative_concrete_offset (bit_offset_t *out) const
1502 {
1503 if (tree byte_offset_cst = m_byte_offset->maybe_get_constant ())
1504 {
1505 gcc_assert (TREE_CODE (byte_offset_cst) == INTEGER_CST);
1506 /* Use a signed value for the byte offset, to handle
1507 negative offsets. */
1508 HOST_WIDE_INT byte_offset
1509 = wi::to_offset (byte_offset_cst).to_shwi ();
1510 HOST_WIDE_INT bit_offset = byte_offset * BITS_PER_UNIT;
1511 *out = bit_offset;
1512 return true;
1513 }
1514 return false;
1515 }
1516
1517 /* Implementation of region::get_relative_symbolic_offset vfunc
1518 for offset_region. */
1519
1520 const svalue *
1521 offset_region::get_relative_symbolic_offset (region_model_manager *mgr
1522 ATTRIBUTE_UNUSED) const
1523 {
1524 return get_byte_offset ();
1525 }
1526
1527 /* Implementation of region::get_byte_size_sval vfunc for offset_region. */
1528
1529 const svalue *
1530 offset_region::get_byte_size_sval (region_model_manager *mgr) const
1531 {
1532 tree offset_cst = get_byte_offset ()->maybe_get_constant ();
1533 byte_size_t byte_size;
1534 /* If the offset points in the middle of the region,
1535 return the remaining bytes. */
1536 if (get_byte_size (&byte_size) && offset_cst)
1537 {
1538 byte_size_t offset = wi::to_offset (offset_cst);
1539 byte_range r (0, byte_size);
1540 if (r.contains_p (offset))
1541 {
1542 tree remaining_byte_size = wide_int_to_tree (size_type_node,
1543 byte_size - offset);
1544 return mgr->get_or_create_constant_svalue (remaining_byte_size);
1545 }
1546 }
1547
1548 return region::get_byte_size_sval (mgr);
1549 }
1550
1551 /* class sized_region : public region. */
1552
1553 /* Implementation of region::accept vfunc for sized_region. */
1554
1555 void
1556 sized_region::accept (visitor *v) const
1557 {
1558 region::accept (v);
1559 m_byte_size_sval->accept (v);
1560 }
1561
1562 /* Implementation of region::dump_to_pp vfunc for sized_region. */
1563
1564 void
1565 sized_region::dump_to_pp (pretty_printer *pp, bool simple) const
1566 {
1567 if (simple)
1568 {
1569 pp_string (pp, "SIZED_REG(");
1570 get_parent_region ()->dump_to_pp (pp, simple);
1571 pp_string (pp, ", ");
1572 m_byte_size_sval->dump_to_pp (pp, simple);
1573 pp_string (pp, ")");
1574 }
1575 else
1576 {
1577 pp_string (pp, "sized_region(");
1578 get_parent_region ()->dump_to_pp (pp, simple);
1579 pp_string (pp, ", ");
1580 m_byte_size_sval->dump_to_pp (pp, simple);
1581 pp_printf (pp, ")");
1582 }
1583 }
1584
1585 /* Implementation of region::get_byte_size vfunc for sized_region. */
1586
1587 bool
1588 sized_region::get_byte_size (byte_size_t *out) const
1589 {
1590 if (tree cst = m_byte_size_sval->maybe_get_constant ())
1591 {
1592 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
1593 *out = tree_to_uhwi (cst);
1594 return true;
1595 }
1596 return false;
1597 }
1598
1599 /* Implementation of region::get_bit_size vfunc for sized_region. */
1600
1601 bool
1602 sized_region::get_bit_size (bit_size_t *out) const
1603 {
1604 byte_size_t byte_size;
1605 if (!get_byte_size (&byte_size))
1606 return false;
1607 *out = byte_size * BITS_PER_UNIT;
1608 return true;
1609 }
1610
1611 /* class cast_region : public region. */
1612
1613 /* Implementation of region::accept vfunc for cast_region. */
1614
1615 void
1616 cast_region::accept (visitor *v) const
1617 {
1618 region::accept (v);
1619 m_original_region->accept (v);
1620 }
1621
1622 /* Implementation of region::dump_to_pp vfunc for cast_region. */
1623
1624 void
1625 cast_region::dump_to_pp (pretty_printer *pp, bool simple) const
1626 {
1627 if (simple)
1628 {
1629 pp_string (pp, "CAST_REG(");
1630 print_quoted_type (pp, get_type ());
1631 pp_string (pp, ", ");
1632 m_original_region->dump_to_pp (pp, simple);
1633 pp_string (pp, ")");
1634 }
1635 else
1636 {
1637 pp_string (pp, "cast_region(");
1638 m_original_region->dump_to_pp (pp, simple);
1639 pp_string (pp, ", ");
1640 print_quoted_type (pp, get_type ());
1641 pp_printf (pp, ")");
1642 }
1643 }
1644
1645 /* Implementation of region::get_relative_concrete_offset vfunc
1646 for cast_region. */
1647
1648 bool
1649 cast_region::get_relative_concrete_offset (bit_offset_t *out) const
1650 {
1651 *out = (int) 0;
1652 return true;
1653 }
1654
1655 /* class heap_allocated_region : public region. */
1656
1657 /* Implementation of region::dump_to_pp vfunc for heap_allocated_region. */
1658
1659 void
1660 heap_allocated_region::dump_to_pp (pretty_printer *pp, bool simple) const
1661 {
1662 if (simple)
1663 pp_printf (pp, "HEAP_ALLOCATED_REGION(%i)", get_id ());
1664 else
1665 pp_printf (pp, "heap_allocated_region(%i)", get_id ());
1666 }
1667
1668 /* class alloca_region : public region. */
1669
1670 /* Implementation of region::dump_to_pp vfunc for alloca_region. */
1671
1672 void
1673 alloca_region::dump_to_pp (pretty_printer *pp, bool simple) const
1674 {
1675 if (simple)
1676 pp_printf (pp, "ALLOCA_REGION(%i)", get_id ());
1677 else
1678 pp_printf (pp, "alloca_region(%i)", get_id ());
1679 }
1680
1681 /* class string_region : public region. */
1682
1683 /* Implementation of region::dump_to_pp vfunc for string_region. */
1684
1685 void
1686 string_region::dump_to_pp (pretty_printer *pp, bool simple) const
1687 {
1688 if (simple)
1689 dump_tree (pp, m_string_cst);
1690 else
1691 {
1692 pp_string (pp, "string_region(");
1693 dump_tree (pp, m_string_cst);
1694 if (!flag_dump_noaddr)
1695 {
1696 pp_string (pp, " (");
1697 pp_pointer (pp, m_string_cst);
1698 pp_string (pp, "))");
1699 }
1700 }
1701 }
1702
1703 /* class bit_range_region : public region. */
1704
1705 /* Implementation of region::dump_to_pp vfunc for bit_range_region. */
1706
1707 void
1708 bit_range_region::dump_to_pp (pretty_printer *pp, bool simple) const
1709 {
1710 if (simple)
1711 {
1712 pp_string (pp, "BIT_RANGE_REG(");
1713 get_parent_region ()->dump_to_pp (pp, simple);
1714 pp_string (pp, ", ");
1715 m_bits.dump_to_pp (pp);
1716 pp_string (pp, ")");
1717 }
1718 else
1719 {
1720 pp_string (pp, "bit_range_region(");
1721 get_parent_region ()->dump_to_pp (pp, simple);
1722 pp_string (pp, ", ");
1723 m_bits.dump_to_pp (pp);
1724 pp_printf (pp, ")");
1725 }
1726 }
1727
1728 /* Implementation of region::get_byte_size vfunc for bit_range_region. */
1729
1730 bool
1731 bit_range_region::get_byte_size (byte_size_t *out) const
1732 {
1733 if (m_bits.m_size_in_bits % BITS_PER_UNIT == 0)
1734 {
1735 *out = m_bits.m_size_in_bits / BITS_PER_UNIT;
1736 return true;
1737 }
1738 return false;
1739 }
1740
1741 /* Implementation of region::get_bit_size vfunc for bit_range_region. */
1742
1743 bool
1744 bit_range_region::get_bit_size (bit_size_t *out) const
1745 {
1746 *out = m_bits.m_size_in_bits;
1747 return true;
1748 }
1749
1750 /* Implementation of region::get_byte_size_sval vfunc for bit_range_region. */
1751
1752 const svalue *
1753 bit_range_region::get_byte_size_sval (region_model_manager *mgr) const
1754 {
1755 if (m_bits.m_size_in_bits % BITS_PER_UNIT != 0)
1756 return mgr->get_or_create_unknown_svalue (size_type_node);
1757
1758 HOST_WIDE_INT num_bytes = m_bits.m_size_in_bits.to_shwi () / BITS_PER_UNIT;
1759 return mgr->get_or_create_int_cst (size_type_node, num_bytes);
1760 }
1761
1762 /* Implementation of region::get_relative_concrete_offset vfunc for
1763 bit_range_region. */
1764
1765 bool
1766 bit_range_region::get_relative_concrete_offset (bit_offset_t *out) const
1767 {
1768 *out = m_bits.get_start_bit_offset ();
1769 return true;
1770 }
1771
1772 /* Implementation of region::get_relative_symbolic_offset vfunc for
1773 bit_range_region.
1774 The returned svalue is equal to the offset converted to bytes and
1775 rounded down. */
1776
1777 const svalue *
1778 bit_range_region::get_relative_symbolic_offset (region_model_manager *mgr)
1779 const
1780 {
1781 byte_offset_t start_byte = m_bits.get_start_bit_offset () / BITS_PER_UNIT;
1782 tree start_bit_tree = wide_int_to_tree (integer_type_node, start_byte);
1783 return mgr->get_or_create_constant_svalue (start_bit_tree);
1784 }
1785
1786 /* class var_arg_region : public region. */
1787
1788 void
1789 var_arg_region::dump_to_pp (pretty_printer *pp, bool simple) const
1790 {
1791 if (simple)
1792 {
1793 pp_string (pp, "VAR_ARG_REG(");
1794 get_parent_region ()->dump_to_pp (pp, simple);
1795 pp_printf (pp, ", arg_idx: %d)", m_idx);
1796 }
1797 else
1798 {
1799 pp_string (pp, "var_arg_region(");
1800 get_parent_region ()->dump_to_pp (pp, simple);
1801 pp_printf (pp, ", arg_idx: %d)", m_idx);
1802 }
1803 }
1804
1805 /* Get the frame_region for this var_arg_region. */
1806
1807 const frame_region *
1808 var_arg_region::get_frame_region () const
1809 {
1810 gcc_assert (get_parent_region ());
1811 return as_a <const frame_region *> (get_parent_region ());
1812 }
1813
1814 /* class unknown_region : public region. */
1815
1816 /* Implementation of region::dump_to_pp vfunc for unknown_region. */
1817
1818 void
1819 unknown_region::dump_to_pp (pretty_printer *pp, bool /*simple*/) const
1820 {
1821 pp_string (pp, "UNKNOWN_REGION");
1822 }
1823
1824 } // namespace ana
1825
1826 #endif /* #if ENABLE_ANALYZER */