1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
any later version.
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "basic-block.h"
28 #include "gimple-iterator.h"
29 #include "diagnostic-core.h"
34 #include "stringpool.h"
37 #include "fold-const.h"
38 #include "tree-pretty-print.h"
39 #include "diagnostic-color.h"
40 #include "diagnostic-metadata.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
52 #include "analyzer/supergraph.h"
54 #include "analyzer/region-model.h"
55 #include "analyzer/constraint-manager.h"
56 #include "diagnostic-event-id.h"
57 #include "analyzer/sm.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "analyzer/pending-diagnostic.h"
61 #include "analyzer/analyzer-selftests.h"
67 /* Dump T to PP in language-independent form, for debugging/logging/dumping
71 dump_tree (pretty_printer
*pp
, tree t
)
73 dump_generic_node (pp
, t
, 0, TDF_SLIM
, 0);
76 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
77 calls within other pp_printf calls.
79 default_tree_printer handles 'T' and some other codes by calling
80 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
81 dump_generic_node calls pp_printf in various places, leading to
84 Ideally pp_printf could be made to be reentrant, but in the meantime
85 this function provides a workaround. */
88 print_quoted_type (pretty_printer
*pp
, tree t
)
90 pp_begin_quote (pp
, pp_show_color (pp
));
91 dump_generic_node (pp
, t
, 0, TDF_SLIM
, 0);
92 pp_end_quote (pp
, pp_show_color (pp
));
95 /* Dump this path_var to PP (which must support %E for trees).
97 Express the stack depth using an "@DEPTH" suffix, so e.g. given
104 - the "i" in "bar" would be "(i @ 0)"
105 - the "j" in "foo" would be "(j @ 1)". */
108 path_var::dump (pretty_printer
*pp
) const
110 if (m_tree
== NULL_TREE
)
111 pp_string (pp
, "NULL");
112 if (CONSTANT_CLASS_P (m_tree
))
113 pp_printf (pp
, "%qE", m_tree
);
115 pp_printf (pp
, "(%qE @ %i)", m_tree
, m_stack_depth
);
118 /* For use in printing a comma-separated list. */
121 dump_separator (pretty_printer
*pp
, bool *is_first
)
124 pp_string (pp
, ", ");
128 /* Concrete subclass of constraint_manager that wires it up to a region_model
129 (whilst allowing the constraint_manager and region_model to be somewhat
131 TODO: revisit this; maybe put the region_model * into the constraint_manager
134 class impl_constraint_manager
: public constraint_manager
137 impl_constraint_manager (region_model
*model
)
138 : constraint_manager (),
142 impl_constraint_manager (const impl_constraint_manager
&other
,
144 : constraint_manager (other
),
148 constraint_manager
*clone (region_model
*model
) const
150 return new impl_constraint_manager (*this, model
);
153 tree
maybe_get_constant (svalue_id sid
) const FINAL OVERRIDE
155 svalue
*svalue
= m_model
->get_svalue (sid
);
156 return svalue
->maybe_get_constant ();
159 svalue_id
get_sid_for_constant (tree cst
) const FINAL OVERRIDE
161 gcc_assert (CONSTANT_CLASS_P (cst
));
162 return m_model
->get_rvalue (cst
, NULL
);
165 int get_num_svalues () const FINAL OVERRIDE
167 return m_model
->get_num_svalues ();
171 region_model
*m_model
;
174 /* class svalue_id. */
176 /* Print this svalue_id to PP. */
179 svalue_id::print (pretty_printer
*pp
) const
182 pp_printf (pp
, "null");
184 pp_printf (pp
, "sv%i", m_idx
);
187 /* Print this svalue_id in .dot format to PP. */
190 svalue_id::dump_node_name_to_pp (pretty_printer
*pp
) const
192 gcc_assert (!null_p ());
193 pp_printf (pp
, "svalue_%i", m_idx
);
196 /* Assert that this object is valid (w.r.t. MODEL). */
199 svalue_id::validate (const region_model
&model
) const
201 gcc_assert (null_p () || m_idx
< (int)model
.get_num_svalues ());
204 /* class region_id. */
206 /* Print this region_id to PP. */
209 region_id::print (pretty_printer
*pp
) const
212 pp_printf (pp
, "null");
214 pp_printf (pp
, "r%i", m_idx
);
217 /* Print this region_id in .dot format to PP. */
220 region_id::dump_node_name_to_pp (pretty_printer
*pp
) const
222 gcc_assert (!null_p ());
223 pp_printf (pp
, "region_%i", m_idx
);
226 /* Assert that this object is valid (w.r.t. MODEL). */
229 region_id::validate (const region_model
&model
) const
231 gcc_assert (null_p () || m_idx
< (int)model
.get_num_regions ());
236 /* id_set<region_id>'s ctor. */
239 id_set
<region_id
>::id_set (const region_model
*model
)
240 : m_bitmap (model
->get_num_regions ())
242 bitmap_clear (m_bitmap
);
245 /* class svalue and its various subclasses. */
249 /* svalue's equality operator. Most of the work is done by the
250 a "compare_fields" implementation on each subclass. */
253 svalue::operator== (const svalue
&other
) const
255 enum svalue_kind this_kind
= get_kind ();
256 enum svalue_kind other_kind
= other
.get_kind ();
257 if (this_kind
!= other_kind
)
260 if (m_type
!= other
.m_type
)
269 const region_svalue
&this_sub
270 = (const region_svalue
&)*this;
271 const region_svalue
&other_sub
272 = (const region_svalue
&)other
;
273 return this_sub
.compare_fields (other_sub
);
278 const constant_svalue
&this_sub
279 = (const constant_svalue
&)*this;
280 const constant_svalue
&other_sub
281 = (const constant_svalue
&)other
;
282 return this_sub
.compare_fields (other_sub
);
287 const unknown_svalue
&this_sub
288 = (const unknown_svalue
&)*this;
289 const unknown_svalue
&other_sub
290 = (const unknown_svalue
&)other
;
291 return this_sub
.compare_fields (other_sub
);
296 const poisoned_svalue
&this_sub
297 = (const poisoned_svalue
&)*this;
298 const poisoned_svalue
&other_sub
299 = (const poisoned_svalue
&)other
;
300 return this_sub
.compare_fields (other_sub
);
305 const setjmp_svalue
&this_sub
306 = (const setjmp_svalue
&)*this;
307 const setjmp_svalue
&other_sub
308 = (const setjmp_svalue
&)other
;
309 return this_sub
.compare_fields (other_sub
);
315 /* Generate a hash value for this svalue. Most of the work is done by the
316 add_to_hash vfunc. */
319 svalue::hash () const
321 inchash::hash hstate
;
323 hstate
.add_int (TYPE_UID (m_type
));
324 add_to_hash (hstate
);
325 return hstate
.end ();
328 /* Print this svalue and its ID to PP. */
/* NOTE(review): several lines of this definition (the return type,
   opening brace, the this_sid parameter printing, and the guard on
   m_type) were dropped by the extraction; the fragments below are kept
   verbatim.  Restore from upstream before relying on this.  */
331 svalue::print (const region_model
&model
,
333 pretty_printer
*pp
) const
/* Open the printed record for this svalue.  */
336 pp_string (pp
, ": {");
/* If a type is present, it must really be a type node; print it
   quoted, followed by a separator.  */
340 gcc_assert (TYPE_P (m_type
));
341 pp_string (pp
, "type: ");
342 print_quoted_type (pp
, m_type
);
343 pp_string (pp
, ", ");
/* Delegate the kind-specific payload to the print_details vfunc.  */
347 print_details (model
, this_sid
, pp
);
352 /* Dump this svalue in the form of a .dot record to PP. */
/* NOTE(review): the return type, braces, and at least one statement
   (orig line 362) were dropped by the extraction; fragments kept
   verbatim.  Restore from upstream before relying on this.  */
355 svalue::dump_dot_to_pp (const region_model
&model
,
357 pretty_printer
*pp
) const
/* Emit the graphviz node name, then open the label attribute.  */
359 this_sid
.dump_node_name_to_pp (pp
);
360 pp_printf (pp
, " [label=\"");
361 pp_write_text_to_stream (pp
);
363 pp_string (pp
, ": {");
/* The label body is the regular printed form of this svalue,
   escaped for use inside a .dot label.  */
364 print (model
, this_sid
, pp
);
365 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
366 pp_string (pp
, "}\"];");
370 /* Base implementation of svalue::remap_region_ids vfunc. */
373 svalue::remap_region_ids (const region_id_map
&)
378 /* Base implementation of svalue::walk_for_canonicalization vfunc. */
381 svalue::walk_for_canonicalization (canonicalization
*) const
386 /* Base implementation of svalue::get_child_sid vfunc. */
389 svalue::get_child_sid (region
*parent ATTRIBUTE_UNUSED
,
392 region_model_context
*ctxt ATTRIBUTE_UNUSED
)
394 svalue
*new_child_value
= clone ();
395 if (child
->get_type ())
396 new_child_value
->m_type
= child
->get_type ();
397 svalue_id new_child_sid
= model
.add_svalue (new_child_value
);
398 return new_child_sid
;
401 /* If this svalue is a constant_svalue, return the underlying tree constant.
402 Otherwise return NULL_TREE. */
405 svalue::maybe_get_constant () const
407 if (const constant_svalue
*cst_sval
= dyn_cast_constant_svalue ())
408 return cst_sval
->get_constant ();
413 /* class region_svalue : public svalue. */
415 /* Compare the fields of this region_svalue with OTHER, returning true
417 For use by svalue::operator==. */
420 region_svalue::compare_fields (const region_svalue
&other
) const
422 return m_rid
== other
.m_rid
;
425 /* Implementation of svalue::add_to_hash vfunc for region_svalue. */
428 region_svalue::add_to_hash (inchash::hash
&hstate
) const
430 inchash::add (m_rid
, hstate
);
433 /* Implementation of svalue::print_details vfunc for region_svalue. */
436 region_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
437 svalue_id this_sid ATTRIBUTE_UNUSED
,
438 pretty_printer
*pp
) const
441 pp_string (pp
, "NULL");
449 /* Implementation of svalue::dump_dot_to_pp for region_svalue. */
452 region_svalue::dump_dot_to_pp (const region_model
&model
,
454 pretty_printer
*pp
) const
456 svalue::dump_dot_to_pp (model
, this_sid
, pp
);
458 /* If non-NULL, add an edge to the pointed-to region. */
459 if (!m_rid
.null_p ())
461 this_sid
.dump_node_name_to_pp (pp
);
462 pp_string (pp
, " -> ");
463 m_rid
.dump_node_name_to_pp (pp
);
469 /* Implementation of svalue::remap_region_ids vfunc for region_svalue. */
472 region_svalue::remap_region_ids (const region_id_map
&map
)
477 /* Merge REGION_SVAL_A and REGION_SVAL_B using MERGER, writing the result
481 region_svalue::merge_values (const region_svalue
®ion_sval_a
,
482 const region_svalue
®ion_sval_b
,
483 svalue_id
*merged_sid
,
485 model_merger
*merger
)
487 region_id a_rid
= region_sval_a
.get_pointee ();
488 region_id b_rid
= region_sval_b
.get_pointee ();
490 /* Both are non-NULL. */
491 gcc_assert (!a_rid
.null_p () && !b_rid
.null_p ());
493 /* Have these ptr-values already been merged? */
496 = merger
->m_map_regions_from_a_to_m
.get_dst_for_src (a_rid
);
498 = merger
->m_map_regions_from_b_to_m
.get_dst_for_src (b_rid
);
500 /* "null_p" here means "we haven't seen this ptr-value before".
501 If we've seen one but not the other, or we have different
502 regions, then the merged ptr has to be "unknown". */
503 if (a_rid_in_m
!= b_rid_in_m
)
505 svalue
*merged_sval
= new unknown_svalue (type
);
506 *merged_sid
= merger
->m_merged_model
->add_svalue (merged_sval
);
510 /* Have we seen this yet? If so, reuse the value. */
511 if (!a_rid_in_m
.null_p ())
514 = merger
->m_merged_model
->get_or_create_ptr_svalue (type
, a_rid_in_m
);
518 /* Otherwise we have A/B regions that haven't been referenced yet. */
520 /* Are the regions the "same", when seen from the tree point-of-view.
521 If so, create a merged pointer to it. */
522 path_var pv_a
= merger
->m_model_a
->get_representative_path_var (a_rid
);
523 path_var pv_b
= merger
->m_model_b
->get_representative_path_var (b_rid
);
527 region_id merged_pointee_rid
528 = merger
->m_merged_model
->get_lvalue (pv_a
, NULL
);
530 = merger
->m_merged_model
->get_or_create_ptr_svalue (type
,
532 merger
->record_regions (a_rid
, b_rid
, merged_pointee_rid
);
536 /* Handle an A/B pair of ptrs that both point at heap regions.
537 If they both have a heap region in the merger model, merge them. */
538 region
*region_a
= merger
->m_model_a
->get_region (a_rid
);
539 region
*region_b
= merger
->m_model_b
->get_region (b_rid
);
540 region_id a_parent_rid
= region_a
->get_parent ();
541 region_id b_parent_rid
= region_b
->get_parent ();
542 region
*parent_region_a
= merger
->m_model_a
->get_region (a_parent_rid
);
543 region
*parent_region_b
= merger
->m_model_b
->get_region (b_parent_rid
);
546 && parent_region_a
->get_kind () == RK_HEAP
547 && parent_region_b
->get_kind () == RK_HEAP
)
549 /* We have an A/B pair of ptrs that both point at heap regions. */
550 /* presumably we want to see if each A/B heap region already
551 has a merged region, and, if so, is it the same one. */
552 // This check is above
554 region_id merged_pointee_rid
555 = merger
->m_merged_model
->add_new_malloc_region ();
557 = merger
->m_merged_model
->get_or_create_ptr_svalue
558 (type
, merged_pointee_rid
);
559 merger
->record_regions (a_rid
, b_rid
, merged_pointee_rid
);
563 /* Two different non-NULL pointers? Merge to unknown. */
564 svalue
*merged_sval
= new unknown_svalue (type
);
565 *merged_sid
= merger
->m_merged_model
->add_svalue (merged_sval
);
569 /* Implementation of svalue::walk_for_canonicalization vfunc for
573 region_svalue::walk_for_canonicalization (canonicalization
*c
) const
578 /* Evaluate the condition LHS OP RHS.
579 Subroutine of region_model::eval_condition for when we have a pair of
583 region_svalue::eval_condition (region_svalue
*lhs
,
587 /* See if they point to the same region. */
588 /* TODO: what about child regions where the child is the first child
590 region_id lhs_rid
= lhs
->get_pointee ();
591 region_id rhs_rid
= rhs
->get_pointee ();
598 if (lhs_rid
== rhs_rid
)
599 return tristate::TS_TRUE
;
601 return tristate::TS_FALSE
;
605 if (lhs_rid
!= rhs_rid
)
606 return tristate::TS_TRUE
;
608 return tristate::TS_FALSE
;
613 if (lhs_rid
== rhs_rid
)
614 return tristate::TS_TRUE
;
619 if (lhs_rid
== rhs_rid
)
620 return tristate::TS_FALSE
;
624 return tristate::TS_UNKNOWN
;
627 /* class constant_svalue : public svalue. */
629 /* Compare the fields of this constant_svalue with OTHER, returning true
631 For use by svalue::operator==. */
634 constant_svalue::compare_fields (const constant_svalue
&other
) const
636 return m_cst_expr
== other
.m_cst_expr
;
639 /* Implementation of svalue::add_to_hash vfunc for constant_svalue. */
642 constant_svalue::add_to_hash (inchash::hash
&hstate
) const
644 inchash::add_expr (m_cst_expr
, hstate
);
647 /* Merge the CST_SVAL_A and CST_SVAL_B using MERGER, writing the id of
648 the resulting svalue into *MERGED_SID. */
651 constant_svalue::merge_values (const constant_svalue
&cst_sval_a
,
652 const constant_svalue
&cst_sval_b
,
653 svalue_id
*merged_sid
,
654 model_merger
*merger
)
656 tree cst_a
= cst_sval_a
.get_constant ();
657 tree cst_b
= cst_sval_b
.get_constant ();
661 /* If they are the same constant, merge as that constant value. */
662 merged_sval
= new constant_svalue (cst_a
);
666 /* Otherwise, we have two different constant values.
667 Merge as an unknown value.
668 TODO: impose constraints on the value?
669 (maybe just based on A, to avoid infinite chains) */
670 merged_sval
= new unknown_svalue (TREE_TYPE (cst_a
));
672 *merged_sid
= merger
->m_merged_model
->add_svalue (merged_sval
);
675 /* Evaluate the condition LHS OP RHS.
676 Subroutine of region_model::eval_condition for when we have a pair of
680 constant_svalue::eval_condition (constant_svalue
*lhs
,
682 constant_svalue
*rhs
)
684 tree lhs_const
= lhs
->get_constant ();
685 tree rhs_const
= rhs
->get_constant ();
687 gcc_assert (CONSTANT_CLASS_P (lhs_const
));
688 gcc_assert (CONSTANT_CLASS_P (rhs_const
));
690 /* Check for comparable types. */
691 if (types_compatible_p (TREE_TYPE (lhs_const
), TREE_TYPE (rhs_const
)))
694 = fold_binary (op
, boolean_type_node
, lhs_const
, rhs_const
);
695 if (comparison
== boolean_true_node
)
696 return tristate (tristate::TS_TRUE
);
697 if (comparison
== boolean_false_node
)
698 return tristate (tristate::TS_FALSE
);
700 return tristate::TS_UNKNOWN
;
703 /* Implementation of svalue::print_details vfunc for constant_svalue. */
706 constant_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
707 svalue_id this_sid ATTRIBUTE_UNUSED
,
708 pretty_printer
*pp
) const
710 pp_printf (pp
, "%qE", m_cst_expr
);
713 /* Implementation of svalue::get_child_sid vfunc for constant_svalue. */
716 constant_svalue::get_child_sid (region
*parent ATTRIBUTE_UNUSED
,
719 region_model_context
*ctxt ATTRIBUTE_UNUSED
)
721 /* TODO: handle the all-zeroes case by returning an all-zeroes of the
724 /* Otherwise, we don't have a good way to get a child value out of a
727 Handle this case by using an unknown value. */
728 svalue
*unknown_sval
= new unknown_svalue (child
->get_type ());
729 return model
.add_svalue (unknown_sval
);
732 /* class unknown_svalue : public svalue. */
734 /* Compare the fields of this unknown_svalue with OTHER, returning true
736 For use by svalue::operator==. */
739 unknown_svalue::compare_fields (const unknown_svalue
&) const
741 /* I *think* we want to return true here, in that when comparing
742 two region models, we want two peer unknown_svalue instances
747 /* Implementation of svalue::add_to_hash vfunc for unknown_svalue. */
750 unknown_svalue::add_to_hash (inchash::hash
&) const
755 /* Implementation of svalue::print_details vfunc for unknown_svalue. */
758 unknown_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
759 svalue_id this_sid ATTRIBUTE_UNUSED
,
760 pretty_printer
*pp
) const
762 pp_string (pp
, "unknown");
765 /* Get a string for KIND for use in debug dumps. */
768 poison_kind_to_str (enum poison_kind kind
)
774 case POISON_KIND_UNINIT
:
776 case POISON_KIND_FREED
:
778 case POISON_KIND_POPPED_STACK
:
779 return "popped stack";
783 /* class poisoned_svalue : public svalue. */
785 /* Compare the fields of this poisoned_svalue with OTHER, returning true
787 For use by svalue::operator==. */
790 poisoned_svalue::compare_fields (const poisoned_svalue
&other
) const
792 return m_kind
== other
.m_kind
;
795 /* Implementation of svalue::add_to_hash vfunc for poisoned_svalue. */
798 poisoned_svalue::add_to_hash (inchash::hash
&hstate
) const
800 hstate
.add_int (m_kind
);
803 /* Implementation of svalue::print_details vfunc for poisoned_svalue. */
806 poisoned_svalue::print_details (const region_model
&model ATTRIBUTE_UNUSED
,
807 svalue_id this_sid ATTRIBUTE_UNUSED
,
808 pretty_printer
*pp
) const
810 pp_printf (pp
, "poisoned: %s", poison_kind_to_str (m_kind
));
813 /* class setjmp_svalue's implementation is in engine.cc, so that it can use
814 the declaration of exploded_node. */
816 /* class region and its various subclasses. */
818 /* Get a string for KIND for use in debug dumps. */
821 region_kind_to_str (enum region_kind kind
)
856 /* Equality operator for region.
857 After comparing base class fields and kind, the rest of the
858 comparison is handled off to a "compare_fields" member function
859 specific to the appropriate subclass. */
862 region::operator== (const region
&other
) const
864 if (m_parent_rid
!= other
.m_parent_rid
)
866 if (m_sval_id
!= other
.m_sval_id
)
868 if (m_type
!= other
.m_type
)
871 enum region_kind this_kind
= get_kind ();
872 enum region_kind other_kind
= other
.get_kind ();
873 if (this_kind
!= other_kind
)
877 if (m_view_rids
.length () != other
.m_view_rids
.length ())
881 FOR_EACH_VEC_ELT (m_view_rids
, i
, rid
)
882 if (! (*rid
== other
.m_view_rids
[i
]))
894 const primitive_region
&this_sub
895 = (const primitive_region
&)*this;
896 const primitive_region
&other_sub
897 = (const primitive_region
&)other
;
898 return this_sub
.compare_fields (other_sub
);
903 const struct_region
&this_sub
904 = (const struct_region
&)*this;
905 const struct_region
&other_sub
906 = (const struct_region
&)other
;
907 return this_sub
.compare_fields (other_sub
);
911 const union_region
&this_sub
912 = (const union_region
&)*this;
913 const union_region
&other_sub
914 = (const union_region
&)other
;
915 return this_sub
.compare_fields (other_sub
);
919 const array_region
&this_sub
920 = (const array_region
&)*this;
921 const array_region
&other_sub
922 = (const array_region
&)other
;
923 return this_sub
.compare_fields (other_sub
);
927 const frame_region
&this_sub
928 = (const frame_region
&)*this;
929 const frame_region
&other_sub
930 = (const frame_region
&)other
;
931 return this_sub
.compare_fields (other_sub
);
935 const globals_region
&this_sub
936 = (const globals_region
&)*this;
937 const globals_region
&other_sub
938 = (const globals_region
&)other
;
939 return this_sub
.compare_fields (other_sub
);
943 const code_region
&this_sub
944 = (const code_region
&)*this;
945 const code_region
&other_sub
946 = (const code_region
&)other
;
947 return this_sub
.compare_fields (other_sub
);
951 const function_region
&this_sub
952 = (const function_region
&)*this;
953 const function_region
&other_sub
954 = (const function_region
&)other
;
955 return this_sub
.compare_fields (other_sub
);
959 const stack_region
&this_sub
960 = (const stack_region
&)*this;
961 const stack_region
&other_sub
962 = (const stack_region
&)other
;
963 return this_sub
.compare_fields (other_sub
);
967 const root_region
&this_sub
968 = (const root_region
&)*this;
969 const root_region
&other_sub
970 = (const root_region
&)other
;
971 return this_sub
.compare_fields (other_sub
);
975 const symbolic_region
&this_sub
976 = (const symbolic_region
&)*this;
977 const symbolic_region
&other_sub
978 = (const symbolic_region
&)other
;
979 return this_sub
.compare_fields (other_sub
);
983 const heap_region
&this_sub
984 = (const heap_region
&)*this;
985 const heap_region
&other_sub
986 = (const heap_region
&)other
;
987 return this_sub
.compare_fields (other_sub
);
992 /* Get the parent region of this region. */
995 region::get_parent_region (const region_model
&model
) const
997 return model
.get_region (m_parent_rid
);
1000 /* Set this region's value to RHS_SID (or potentially a variant of it,
1001 for some kinds of casts). */
1004 region::set_value (region_model
&model
, region_id this_rid
, svalue_id rhs_sid
,
1005 region_model_context
*ctxt
)
1007 /* Handle some kinds of casting. */
1010 svalue
*sval
= model
.get_svalue (rhs_sid
);
1011 if (sval
->get_type ())
1012 rhs_sid
= model
.maybe_cast (m_type
, rhs_sid
, ctxt
);
1014 sval
= model
.get_svalue (rhs_sid
);
1015 if (sval
->get_type ())
1016 gcc_assert (m_type
== sval
->get_type ());
1019 m_sval_id
= rhs_sid
;
1022 If this is a view, it becomes its parent's active view.
1023 If there was already an active views, invalidate its value; otherwise
1024 if the parent itself had a value, invalidate it.
1025 If it's not a view, then deactivate any view that is active on this
1029 become_active_view (model
, this_rid
);
1032 deactivate_any_active_view (model
);
1033 gcc_assert (m_active_view_rid
.null_p ());
1038 /* Make this region (with id THIS_RID) the "active" view of its parent.
1039 Any other active view has its value set to "unknown" and descendent values
1041 If there wasn't an active view, then set the parent's value to unknown, and
1042 clear its descendent values (apart from this view). */
1045 region::become_active_view (region_model
&model
, region_id this_rid
)
1047 gcc_assert (m_is_view
);
1049 region
*parent_reg
= model
.get_region (m_parent_rid
);
1050 gcc_assert (parent_reg
);
1052 region_id old_active_view_rid
= parent_reg
->m_active_view_rid
;
1054 if (old_active_view_rid
== this_rid
)
1056 /* Already the active view: do nothing. */
1060 /* We have a change of active view. */
1061 parent_reg
->m_active_view_rid
= this_rid
;
1063 if (old_active_view_rid
.null_p ())
1065 /* No previous active view, but the parent and its other children
1067 If so, invalidate those values - but not that of the new view. */
1068 region_id_set
below_region (&model
);
1069 model
.get_descendents (m_parent_rid
, &below_region
, this_rid
);
1070 for (unsigned i
= 0; i
< model
.get_num_regions (); i
++)
1072 region_id
rid (region_id::from_int (i
));
1073 if (below_region
.region_p (rid
))
1075 region
*other_reg
= model
.get_region (rid
);
1076 other_reg
->m_sval_id
= svalue_id::null ();
1079 region
*parent
= model
.get_region (m_parent_rid
);
1081 = model
.add_svalue (new unknown_svalue (parent
->get_type ()));
1085 /* If there was an active view, invalidate it. */
1086 region
*old_active_view
= model
.get_region (old_active_view_rid
);
1087 old_active_view
->deactivate_view (model
, old_active_view_rid
);
1091 /* If this region (with id THIS_RID) has an active view, deactivate it,
1092 clearing m_active_view_rid. */
1095 region::deactivate_any_active_view (region_model
&model
)
1097 if (m_active_view_rid
.null_p ())
1099 region
*view
= model
.get_region (m_active_view_rid
);
1100 view
->deactivate_view (model
, m_active_view_rid
);
1101 m_active_view_rid
= region_id::null ();
1104 /* Clear any values for regions below THIS_RID.
1105 Set the view's value to unknown. */
1108 region::deactivate_view (region_model
&model
, region_id this_view_rid
)
1110 gcc_assert (is_view_p ());
1112 /* Purge values from old_active_this_view_rid and all its
1113 descendents. Potentially we could use a poison value
1114 for this, but let's use unknown for now. */
1115 region_id_set
below_view (&model
);
1116 model
.get_descendents (this_view_rid
, &below_view
, region_id::null ());
1118 for (unsigned i
= 0; i
< model
.get_num_regions (); i
++)
1120 region_id
rid (region_id::from_int (i
));
1121 if (below_view
.region_p (rid
))
1123 region
*other_reg
= model
.get_region (rid
);
1124 other_reg
->m_sval_id
= svalue_id::null ();
1128 m_sval_id
= model
.add_svalue (new unknown_svalue (get_type ()));
1131 /* Get a value for this region, either its value if it has one,
1132 or, failing that, "inherit" a value from first ancestor with a
1135 For example, when getting the value for a local variable within
1136 a stack frame that doesn't have one, the frame doesn't have a value
1137 either, but the stack as a whole will have an "uninitialized" poison
1138 value, so inherit that. */
1141 region::get_value (region_model
&model
, bool non_null
,
1142 region_model_context
*ctxt
)
1144 /* If this region has a value, use it. */
1145 if (!m_sval_id
.null_p ())
1148 /* Otherwise, "inherit" value from first ancestor with a
1151 region
*parent
= model
.get_region (m_parent_rid
);
1154 svalue_id inherited_sid
1155 = parent
->get_inherited_child_sid (this, model
, ctxt
);
1156 if (!inherited_sid
.null_p ())
1157 return inherited_sid
;
1160 /* If a non-null value has been requested, then generate
1161 a new unknown value. Store it, so that repeated reads from this
1162 region will yield the same unknown value. */
1165 svalue_id unknown_sid
= model
.add_svalue (new unknown_svalue (m_type
));
1166 m_sval_id
= unknown_sid
;
1170 return svalue_id::null ();
1173 /* Get a value for CHILD, inheriting from this region.
1175 Recurse, so this region will inherit a value if it doesn't already
1179 region::get_inherited_child_sid (region
*child
,
1180 region_model
&model
,
1181 region_model_context
*ctxt
)
1183 if (m_sval_id
.null_p ())
1186 if (!m_parent_rid
.null_p ())
1188 region
*parent
= model
.get_region (m_parent_rid
);
1189 m_sval_id
= parent
->get_inherited_child_sid (this, model
, ctxt
);
1193 if (!m_sval_id
.null_p ())
1195 /* Clone the parent's value, so that attempts to update it
1196 (e.g giving a specific value to an inherited "uninitialized"
1197 value) touch the child, and not the parent. */
1198 svalue
*this_value
= model
.get_svalue (m_sval_id
);
1199 svalue_id new_child_sid
1200 = this_value
->get_child_sid (this, child
, model
, ctxt
);
1202 ctxt
->on_inherited_svalue (m_sval_id
, new_child_sid
);
1203 child
->m_sval_id
= new_child_sid
;
1204 return new_child_sid
;
1207 return svalue_id::null ();
1210 /* Generate a hash value for this region. The work is done by the
1211 add_to_hash vfunc. */
1214 region::hash () const
1216 inchash::hash hstate
;
1217 add_to_hash (hstate
);
1218 return hstate
.end ();
1221 /* Print a one-liner representation of this region to PP, assuming
1222 that this region is within MODEL and its id is THIS_RID. */
1225 region::print (const region_model
&model
,
1227 pretty_printer
*pp
) const
1229 this_rid
.print (pp
);
1230 pp_string (pp
, ": {");
1233 print_fields (model
, this_rid
, pp
);
1235 pp_string (pp
, "}");
1238 /* Base class implementation of region::dump_dot_to_pp vfunc. */
/* NOTE(review): the return type, braces, and the fillcolor argument
   (orig line 1247) were dropped by the extraction; fragments kept
   verbatim.  Restore from upstream before relying on this.  */
1241 region::dump_dot_to_pp (const region_model
&model
,
1243 pretty_printer
*pp
) const
/* Emit this region's node, with its printed form as the label.  */
1245 this_rid
.dump_node_name_to_pp (pp
);
1246 pp_printf (pp
, " [shape=none,margin=0,style=filled,fillcolor=%s,label=\"",
1248 pp_write_text_to_stream (pp
);
1249 print (model
, this_rid
, pp
);
1250 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
1251 pp_string (pp
, "\"];");
1254 /* Add edge to svalue. */
1255 if (!m_sval_id
.null_p ())
1257 this_rid
.dump_node_name_to_pp (pp
);
1258 pp_string (pp
, " -> ");
1259 m_sval_id
.dump_node_name_to_pp (pp
);
1260 pp_string (pp
, ";");
1264 /* Add edge to parent. */
1265 if (!m_parent_rid
.null_p ())
1267 this_rid
.dump_node_name_to_pp (pp
);
1268 pp_string (pp
, " -> ");
1269 m_parent_rid
.dump_node_name_to_pp (pp
);
1270 pp_string (pp
, ";");
1275 /* Dump a tree-like ASCII-art representation of this region to PP. */
1278 region::dump_to_pp (const region_model
&model
,
1282 bool is_last_child
) const
1284 print (model
, this_rid
, pp
);
1287 const char *new_prefix
;
1288 if (!m_parent_rid
.null_p ())
1289 new_prefix
= ACONCAT ((prefix
, is_last_child
? " " : "| ", NULL
));
1291 new_prefix
= prefix
;
1293 const char *begin_color
= colorize_start (pp_show_color (pp
), "note");
1294 const char *end_color
= colorize_stop (pp_show_color (pp
));
1296 = ACONCAT ((begin_color
, new_prefix
, "|:", end_color
, NULL
));
1298 if (!m_sval_id
.null_p ())
1300 pp_printf (pp
, "%s sval: ", field_prefix
);
1301 model
.get_svalue (m_sval_id
)->print (model
, m_sval_id
, pp
);
1306 pp_printf (pp
, "%s type: ", field_prefix
);
1307 print_quoted_type (pp
, m_type
);
1311 /* Find the children. */
1313 auto_vec
<region_id
> child_rids
;
1315 for (unsigned i
= 0; i
< model
.get_num_regions (); ++i
)
1317 region_id rid
= region_id::from_int (i
);
1318 region
*child
= model
.get_region (rid
);
1319 if (child
->m_parent_rid
== this_rid
)
1320 child_rids
.safe_push (rid
);
1323 /* Print the children, using dump_child_label to label them. */
1325 region_id
*child_rid
;
1326 FOR_EACH_VEC_ELT (child_rids
, i
, child_rid
)
1328 is_last_child
= (i
== child_rids
.length () - 1);
1329 if (!this_rid
.null_p ())
1331 const char *tail
= is_last_child
? "`-" : "|-";
1332 pp_printf (pp
, "%r%s%s%R", "note", new_prefix
, tail
);
1334 dump_child_label (model
, this_rid
, *child_rid
, pp
);
1335 model
.get_region (*child_rid
)->dump_to_pp (model
, *child_rid
, pp
,
1341 /* Base implementation of region::dump_child_label vfunc. */
1344 region::dump_child_label (const region_model
&model
,
1345 region_id this_rid ATTRIBUTE_UNUSED
,
1346 region_id child_rid
,
1347 pretty_printer
*pp
) const
1349 region
*child
= model
.get_region (child_rid
);
1350 if (child
->m_is_view
)
1352 gcc_assert (TYPE_P (child
->get_type ()));
1353 if (m_active_view_rid
== child_rid
)
1354 pp_string (pp
, "active ");
1356 pp_string (pp
, "inactive ");
1357 pp_string (pp
, "view as ");
1358 print_quoted_type (pp
, child
->get_type ());
1359 pp_string (pp
, ": ");
1363 /* Assert that this object is valid. */
1366 region::validate (const region_model
*model
) const
1368 m_parent_rid
.validate (*model
);
1369 m_sval_id
.validate (*model
);
1371 region_id
*view_rid
;
1372 FOR_EACH_VEC_ELT (m_view_rids
, i
, view_rid
)
1374 gcc_assert (!view_rid
->null_p ());
1375 view_rid
->validate (*model
);
1377 m_active_view_rid
.validate (*model
);
1380 /* Apply MAP to svalue_ids to this region. This updates the value
1381 for the region (if any). */
1384 region::remap_svalue_ids (const svalue_id_map
&map
)
1386 map
.update (&m_sval_id
);
1389 /* Base implementation of region::remap_region_ids vfunc; subclasses should
1390 chain up to this, updating any region_id data. */
1393 region::remap_region_ids (const region_id_map
&map
)
1395 map
.update (&m_parent_rid
);
1397 region_id
*view_rid
;
1398 FOR_EACH_VEC_ELT (m_view_rids
, i
, view_rid
)
1399 map
.update (view_rid
);
1400 map
.update (&m_active_view_rid
);
1403 /* Add a new region with id VIEW_RID as a view of this region. */
1406 region::add_view (region_id view_rid
, region_model
*model
)
1408 gcc_assert (!view_rid
.null_p ());
1409 region
*new_view
= model
->get_region (view_rid
);
1410 new_view
->m_is_view
= true;
1411 gcc_assert (!new_view
->m_parent_rid
.null_p ());
1412 gcc_assert (new_view
->m_sval_id
.null_p ());
1414 //gcc_assert (new_view->get_type () != NULL_TREE);
1415 // TODO: this can sometimes be NULL, when viewing through a (void *)
1417 // TODO: the type ought to not be present yet
1419 m_view_rids
.safe_push (view_rid
);
1422 /* Look for a view of type TYPE of this region, returning its id if found,
1423 or null otherwise. */
1426 region::get_view (tree type
, region_model
*model
) const
1429 region_id
*view_rid
;
1430 FOR_EACH_VEC_ELT (m_view_rids
, i
, view_rid
)
1432 region
*view
= model
->get_region (*view_rid
);
1433 gcc_assert (view
->m_is_view
);
1434 if (view
->get_type () == type
)
1437 return region_id::null ();
1440 /* region's ctor. */
1442 region::region (region_id parent_rid
, svalue_id sval_id
, tree type
)
1443 : m_parent_rid (parent_rid
), m_sval_id (sval_id
), m_type (type
),
1444 m_view_rids (), m_is_view (false), m_active_view_rid (region_id::null ())
1446 gcc_assert (type
== NULL_TREE
|| TYPE_P (type
));
1449 /* region's copy ctor. */
1451 region::region (const region
&other
)
1452 : m_parent_rid (other
.m_parent_rid
), m_sval_id (other
.m_sval_id
),
1453 m_type (other
.m_type
), m_view_rids (other
.m_view_rids
.length ()),
1454 m_is_view (other
.m_is_view
), m_active_view_rid (other
.m_active_view_rid
)
1458 FOR_EACH_VEC_ELT (other
.m_view_rids
, i
, rid
)
1459 m_view_rids
.quick_push (*rid
);
1462 /* Base implementation of region::add_to_hash vfunc; subclasses should
1463 chain up to this. */
1466 region::add_to_hash (inchash::hash
&hstate
) const
1468 inchash::add (m_parent_rid
, hstate
);
1469 inchash::add (m_sval_id
, hstate
);
1470 hstate
.add_ptr (m_type
);
1474 /* Base implementation of region::print_fields vfunc. */
1477 region::print_fields (const region_model
&model ATTRIBUTE_UNUSED
,
1478 region_id this_rid ATTRIBUTE_UNUSED
,
1479 pretty_printer
*pp
) const
1481 pp_printf (pp
, "kind: %qs", region_kind_to_str (get_kind ()));
1483 pp_string (pp
, ", parent: ");
1484 m_parent_rid
.print (pp
);
1486 pp_printf (pp
, ", sval: ");
1487 m_sval_id
.print (pp
);
1491 pp_printf (pp
, ", type: ");
1492 print_quoted_type (pp
, m_type
);
1496 /* Determine if a pointer to this region must be non-NULL.
1498 Generally, pointers to regions must be non-NULL, but pointers
1499 to symbolic_regions might, in fact, be NULL.
1501 This allows us to simulate functions like malloc and calloc with:
1502 - only one "outcome" from each statement,
1503 - the idea that the pointer is on the heap if non-NULL
1504 - the possibility that the pointer could be NULL
1505 - the idea that successive values returned from malloc are non-equal
1506 - to be able to zero-fill for calloc. */
1509 region::non_null_p (const region_model
&model
) const
1511 /* Look through views to get at the underlying region. */
1513 return model
.get_region (m_parent_rid
)->non_null_p (model
);
1515 /* Are we within a symbolic_region? If so, it could be NULL. */
1516 if (const symbolic_region
*sym_reg
= dyn_cast_symbolic_region ())
1518 if (sym_reg
->m_possibly_null
)
1525 /* class primitive_region : public region. */
1527 /* Implementation of region::clone vfunc for primitive_region. */
1530 primitive_region::clone () const
1532 return new primitive_region (*this);
1535 /* Implementation of region::walk_for_canonicalization vfunc for
1536 primitive_region. */
1539 primitive_region::walk_for_canonicalization (canonicalization
*) const
1544 /* class map_region : public region. */
1546 /* map_region's copy ctor. */
1548 map_region::map_region (const map_region
&other
)
1554 /* Compare the fields of this map_region with OTHER, returning true
1556 For use by region::operator==. */
1559 map_region::compare_fields (const map_region
&other
) const
1561 if (m_map
.elements () != other
.m_map
.elements ())
1564 for (map_t::iterator iter
= m_map
.begin ();
1565 iter
!= m_map
.end ();
1568 tree key
= (*iter
).first
;
1569 region_id e
= (*iter
).second
;
1570 region_id
*other_slot
= const_cast <map_t
&> (other
.m_map
).get (key
);
1571 if (other_slot
== NULL
)
1573 if (e
!= *other_slot
)
1579 /* Implementation of region::print_fields vfunc for map_region. */
1582 map_region::print_fields (const region_model
&model
,
1584 pretty_printer
*pp
) const
1586 region::print_fields (model
, this_rid
, pp
);
1587 pp_string (pp
, ", map: {");
1588 for (map_t::iterator iter
= m_map
.begin ();
1589 iter
!= m_map
.end ();
1592 if (iter
!= m_map
.begin ())
1593 pp_string (pp
, ", ");
1594 tree expr
= (*iter
).first
;
1595 region_id child_rid
= (*iter
).second
;
1596 pp_printf (pp
, "%qE: ", expr
);
1597 child_rid
.print (pp
);
1599 pp_string (pp
, "}");
1602 /* Implementation of region::dump_dot_to_pp vfunc for map_region. */
1605 map_region::dump_dot_to_pp (const region_model
&model
,
1607 pretty_printer
*pp
) const
1609 region::dump_dot_to_pp (model
, this_rid
, pp
);
1610 for (map_t::iterator iter
= m_map
.begin ();
1611 iter
!= m_map
.end ();
1614 // TODO: add nodes/edges to label things
1616 tree expr
= (*iter
).first
;
1617 region_id child_rid
= (*iter
).second
;
1619 pp_printf (pp
, "rid_label_%i [label=\"", child_rid
.as_int ());
1620 pp_write_text_to_stream (pp
);
1621 pp_printf (pp
, "%qE", expr
);
1622 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
1623 pp_string (pp
, "\"];");
1626 pp_printf (pp
, "rid_label_%i", child_rid
.as_int ());
1627 pp_string (pp
, " -> ");
1628 child_rid
.dump_node_name_to_pp (pp
);
1629 pp_string (pp
, ";");
1634 /* Implementation of region::dump_child_label vfunc for map_region. */
1637 map_region::dump_child_label (const region_model
&model
,
1639 region_id child_rid
,
1640 pretty_printer
*pp
) const
1642 region::dump_child_label (model
, this_rid
, child_rid
, pp
);
1644 for (map_t::iterator iter
= m_map
.begin ();
1645 iter
!= m_map
.end ();
1648 if (child_rid
== (*iter
).second
)
1650 tree key
= (*iter
).first
;
1652 pp_printf (pp
, "%qD: ", key
);
1654 pp_printf (pp
, "%qE: ", key
);
1659 /* Look for a child region for KEY within this map_region.
1660 If it doesn't already exist, create a child map_region, using TYPE for
1662 Return the region_id of the child (whether pre-existing, or
1666 map_region::get_or_create (region_model
*model
,
1672 gcc_assert (valid_key_p (key
));
1673 region_id
*slot
= m_map
.get (key
);
1676 region_id child_rid
= model
->add_region_for_type (this_rid
, type
);
1677 m_map
.put (key
, child_rid
);
1681 /* Get the region_id for the child region for KEY within this
1682 MAP_REGION, or NULL if there is no such child region. */
1685 map_region::get (tree key
)
1688 gcc_assert (valid_key_p (key
));
1689 region_id
*slot
= m_map
.get (key
);
1693 /* Implementation of region::add_to_hash vfunc for map_region. */
1696 map_region::add_to_hash (inchash::hash
&hstate
) const
1698 region::add_to_hash (hstate
);
1702 /* Implementation of region::remap_region_ids vfunc for map_region. */
1705 map_region::remap_region_ids (const region_id_map
&map
)
1707 region::remap_region_ids (map
);
1709 /* Remap the region ids within the map entries. */
1710 for (map_t::iterator iter
= m_map
.begin ();
1711 iter
!= m_map
.end (); ++iter
)
1712 map
.update (&(*iter
).second
);
1715 /* Remove the binding of KEY to its child region (but not the
1716 child region itself).
1717 For use when purging unneeded SSA names. */
1720 map_region::unbind (tree key
)
1723 gcc_assert (valid_key_p (key
));
1727 /* Look for a child region with id CHILD_RID within this map_region.
1728 If one is found, return its tree key, otherwise return NULL_TREE. */
1731 map_region::get_tree_for_child_region (region_id child_rid
) const
1733 // TODO: do we want to store an inverse map?
1734 for (map_t::iterator iter
= m_map
.begin ();
1735 iter
!= m_map
.end ();
1738 tree key
= (*iter
).first
;
1739 region_id r
= (*iter
).second
;
1747 /* Look for a child region CHILD within this map_region.
1748 If one is found, return its tree key, otherwise return NULL_TREE. */
1751 map_region::get_tree_for_child_region (region
*child
,
1752 const region_model
&model
) const
1754 // TODO: do we want to store an inverse map?
1755 for (map_t::iterator iter
= m_map
.begin ();
1756 iter
!= m_map
.end ();
1759 tree key
= (*iter
).first
;
1760 region_id r
= (*iter
).second
;
1761 if (model
.get_region (r
) == child
)
1768 /* Comparator for trees to impose a deterministic ordering on
1772 tree_cmp (const_tree t1
, const_tree t2
)
1777 /* Test tree codes first. */
1778 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1779 return TREE_CODE (t1
) - TREE_CODE (t2
);
1781 /* From this point on, we know T1 and T2 have the same tree code. */
1785 if (DECL_NAME (t1
) && DECL_NAME (t2
))
1786 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1
)),
1787 IDENTIFIER_POINTER (DECL_NAME (t2
)));
1792 else if (DECL_NAME (t2
))
1795 return DECL_UID (t1
) - DECL_UID (t2
);
1799 switch (TREE_CODE (t1
))
1803 if (SSA_NAME_VAR (t1
) && SSA_NAME_VAR (t2
))
1805 int var_cmp
= tree_cmp (SSA_NAME_VAR (t1
), SSA_NAME_VAR (t2
));
1808 return SSA_NAME_VERSION (t1
) - SSA_NAME_VERSION (t2
);
1812 if (SSA_NAME_VAR (t1
))
1814 else if (SSA_NAME_VAR (t2
))
1817 return SSA_NAME_VERSION (t1
) - SSA_NAME_VERSION (t2
);
1823 return tree_int_cst_compare (t1
, t2
);
1827 const real_value
*rv1
= TREE_REAL_CST_PTR (t1
);
1828 const real_value
*rv2
= TREE_REAL_CST_PTR (t2
);
1829 if (real_compare (UNORDERED_EXPR
, rv1
, rv2
))
1831 /* Impose an arbitrary order on NaNs relative to other NaNs
1833 if (int cmp_isnan
= real_isnan (rv1
) - real_isnan (rv2
))
1835 if (int cmp_issignaling_nan
1836 = real_issignaling_nan (rv1
) - real_issignaling_nan (rv2
))
1837 return cmp_issignaling_nan
;
1838 return real_isneg (rv1
) - real_isneg (rv2
);
1840 if (real_compare (LT_EXPR
, rv1
, rv2
))
1842 if (real_compare (GT_EXPR
, rv1
, rv2
))
1848 return strcmp (TREE_STRING_POINTER (t1
),
1849 TREE_STRING_POINTER (t2
));
1861 /* qsort comparator for trees to impose a deterministic ordering on
1865 tree_cmp (const void *p1
, const void *p2
)
1867 const_tree t1
= *(const_tree
const *)p1
;
1868 const_tree t2
= *(const_tree
const *)p2
;
1870 return tree_cmp (t1
, t2
);
1873 /* Attempt to merge MAP_REGION_A and MAP_REGION_B into MERGED_MAP_REGION,
1874 which has region_id MERGED_RID, using MERGER.
1875 Return true if the merger is possible, false otherwise. */
1878 map_region::can_merge_p (const map_region
*map_region_a
,
1879 const map_region
*map_region_b
,
1880 map_region
*merged_map_region
,
1881 region_id merged_rid
,
1882 model_merger
*merger
)
1884 for (map_t::iterator iter
= map_region_a
->m_map
.begin ();
1885 iter
!= map_region_a
->m_map
.end ();
1888 tree key_a
= (*iter
).first
;
1889 region_id rid_a
= (*iter
).second
;
1891 if (const region_id
*slot_b
1892 = const_cast<map_region
*>(map_region_b
)->m_map
.get (key_a
))
1894 region_id rid_b
= *slot_b
;
1896 region
*child_region_a
= merger
->get_region_a
<region
> (rid_a
);
1897 region
*child_region_b
= merger
->get_region_b
<region
> (rid_b
);
1899 gcc_assert (child_region_a
->get_type ()
1900 == child_region_b
->get_type ());
1902 gcc_assert (child_region_a
->get_kind ()
1903 == child_region_b
->get_kind ());
1905 region_id child_merged_rid
1906 = merged_map_region
->get_or_create (merger
->m_merged_model
,
1909 child_region_a
->get_type ());
1911 region
*child_merged_region
1912 = merger
->m_merged_model
->get_region (child_merged_rid
);
1914 /* Consider values. */
1915 svalue_id child_a_sid
= child_region_a
->get_value_direct ();
1916 svalue_id child_b_sid
= child_region_b
->get_value_direct ();
1917 svalue_id child_merged_sid
;
1918 if (!merger
->can_merge_values_p (child_a_sid
, child_b_sid
,
1921 if (!child_merged_sid
.null_p ())
1922 child_merged_region
->set_value (*merger
->m_merged_model
,
1927 if (map_region
*map_region_a
= child_region_a
->dyn_cast_map_region ())
1930 if (!can_merge_p (map_region_a
,
1931 as_a
<map_region
*> (child_region_b
),
1932 as_a
<map_region
*> (child_merged_region
),
1941 /* TODO: region is present in A, but absent in B. */
1945 /* TODO: check for keys in B that aren't in A. */
1951 /* Implementation of region::walk_for_canonicalization vfunc for
1955 map_region::walk_for_canonicalization (canonicalization
*c
) const
1957 auto_vec
<tree
> keys (m_map
.elements ());
1958 for (map_t::iterator iter
= m_map
.begin ();
1959 iter
!= m_map
.end ();
1962 tree key_a
= (*iter
).first
;
1963 keys
.quick_push (key_a
);
1965 keys
.qsort (tree_cmp
);
1969 FOR_EACH_VEC_ELT (keys
, i
, key
)
1971 region_id rid
= *const_cast<map_region
*>(this)->m_map
.get (key
);
1976 /* For debugging purposes: look for a child region for a decl named
1977 IDENTIFIER (or an SSA_NAME for such a decl), returning its value,
1978 or svalue_id::null if none are found. */
1981 map_region::get_value_by_name (tree identifier
,
1982 const region_model
&model
) const
1984 for (map_t::iterator iter
= m_map
.begin ();
1985 iter
!= m_map
.end ();
1988 tree key
= (*iter
).first
;
1989 if (TREE_CODE (key
) == SSA_NAME
)
1990 if (SSA_NAME_VAR (key
))
1991 key
= SSA_NAME_VAR (key
);
1993 if (DECL_NAME (key
) == identifier
)
1995 region_id rid
= (*iter
).second
;
1996 region
*region
= model
.get_region (rid
);
1997 return region
->get_value (const_cast<region_model
&>(model
),
2001 return svalue_id::null ();
2004 /* class struct_or_union_region : public map_region. */
2006 /* Implementation of map_region::valid_key_p vfunc for
2007 struct_or_union_region. */
2010 struct_or_union_region::valid_key_p (tree key
) const
2012 return TREE_CODE (key
) == FIELD_DECL
;
2015 /* Compare the fields of this struct_or_union_region with OTHER, returning
2016 true if they are equal.
2017 For use by region::operator==. */
2020 struct_or_union_region::compare_fields (const struct_or_union_region
&other
)
2023 return map_region::compare_fields (other
);
2026 /* class struct_region : public struct_or_union_region. */
2028 /* Implementation of region::clone vfunc for struct_region. */
2031 struct_region::clone () const
2033 return new struct_region (*this);
2036 /* Compare the fields of this struct_region with OTHER, returning true
2038 For use by region::operator==. */
2041 struct_region::compare_fields (const struct_region
&other
) const
2043 return struct_or_union_region::compare_fields (other
);
2046 /* class union_region : public struct_or_union_region. */
2048 /* Implementation of region::clone vfunc for union_region. */
2051 union_region::clone () const
2053 return new union_region (*this);
2056 /* Compare the fields of this union_region with OTHER, returning true
2058 For use by region::operator==. */
2061 union_region::compare_fields (const union_region
&other
) const
2063 return struct_or_union_region::compare_fields (other
);
2066 /* class frame_region : public map_region. */
2068 /* Compare the fields of this frame_region with OTHER, returning true
2070 For use by region::operator==. */
2073 frame_region::compare_fields (const frame_region
&other
) const
2075 if (!map_region::compare_fields (other
))
2077 if (m_fun
!= other
.m_fun
)
2079 if (m_depth
!= other
.m_depth
)
2084 /* Implementation of region::clone vfunc for frame_region. */
2087 frame_region::clone () const
2089 return new frame_region (*this);
2092 /* Implementation of map_region::valid_key_p vfunc for frame_region. */
2095 frame_region::valid_key_p (tree key
) const
2097 // TODO: could also check that VAR_DECLs are locals
2098 return (TREE_CODE (key
) == PARM_DECL
2099 || TREE_CODE (key
) == VAR_DECL
2100 || TREE_CODE (key
) == SSA_NAME
2101 || TREE_CODE (key
) == RESULT_DECL
);
2104 /* Implementation of region::print_fields vfunc for frame_region. */
2107 frame_region::print_fields (const region_model
&model
,
2109 pretty_printer
*pp
) const
2111 map_region::print_fields (model
, this_rid
, pp
);
2112 pp_printf (pp
, ", function: %qs, depth: %i", function_name (m_fun
), m_depth
);
2115 /* Implementation of region::add_to_hash vfunc for frame_region. */
2118 frame_region::add_to_hash (inchash::hash
&hstate
) const
2120 map_region::add_to_hash (hstate
);
2121 hstate
.add_ptr (m_fun
);
2122 hstate
.add_int (m_depth
);
2125 /* class globals_region : public scope_region. */
2127 /* Compare the fields of this globals_region with OTHER, returning true
2129 For use by region::operator==. */
2132 globals_region::compare_fields (const globals_region
&other
) const
2134 return map_region::compare_fields (other
);
2137 /* Implementation of region::clone vfunc for globals_region. */
2140 globals_region::clone () const
2142 return new globals_region (*this);
2145 /* Implementation of map_region::valid_key_p vfunc for globals_region. */
2148 globals_region::valid_key_p (tree key
) const
2150 return TREE_CODE (key
) == VAR_DECL
;
2153 /* class code_region : public map_region. */
2155 /* Compare the fields of this code_region with OTHER, returning true
2157 For use by region::operator==. */
2160 code_region::compare_fields (const code_region
&other
) const
2162 return map_region::compare_fields (other
);
2165 /* Implementation of region::clone vfunc for code_region. */
2168 code_region::clone () const
2170 return new code_region (*this);
2173 /* Implementation of map_region::valid_key_p vfunc for code_region. */
2176 code_region::valid_key_p (tree key
) const
2178 return TREE_CODE (key
) == FUNCTION_DECL
;
2181 /* class array_region : public region. */
2183 /* array_region's copy ctor. */
2185 array_region::array_region (const array_region
&other
)
2191 /* Get a child region for the element with index INDEX_SID. */
2194 array_region::get_element (region_model
*model
,
2196 svalue_id index_sid
,
2197 region_model_context
*ctxt ATTRIBUTE_UNUSED
)
2199 tree element_type
= TREE_TYPE (get_type ());
2200 svalue
*index_sval
= model
->get_svalue (index_sid
);
2201 if (tree cst_index
= index_sval
->maybe_get_constant ())
2203 key_t key
= key_from_constant (cst_index
);
2204 region_id element_rid
2205 = get_or_create (model
, this_rid
, key
, element_type
);
2209 return model
->get_or_create_view (this_rid
, element_type
);
2212 /* Implementation of region::clone vfunc for array_region. */
2215 array_region::clone () const
2217 return new array_region (*this);
2220 /* Compare the fields of this array_region with OTHER, returning true
2222 For use by region::operator==. */
2225 array_region::compare_fields (const array_region
&other
) const
2227 if (m_map
.elements () != other
.m_map
.elements ())
2230 for (map_t::iterator iter
= m_map
.begin ();
2231 iter
!= m_map
.end ();
2234 int key
= (*iter
).first
;
2235 region_id e
= (*iter
).second
;
2236 region_id
*other_slot
= const_cast <map_t
&> (other
.m_map
).get (key
);
2237 if (other_slot
== NULL
)
2239 if (e
!= *other_slot
)
2245 /* Implementation of region::print_fields vfunc for array_region. */
2248 array_region::print_fields (const region_model
&model
,
2250 pretty_printer
*pp
) const
2252 region::print_fields (model
, this_rid
, pp
);
2253 pp_string (pp
, ", array: {");
2254 for (map_t::iterator iter
= m_map
.begin ();
2255 iter
!= m_map
.end ();
2258 if (iter
!= m_map
.begin ())
2259 pp_string (pp
, ", ");
2260 int key
= (*iter
).first
;
2261 region_id child_rid
= (*iter
).second
;
2262 pp_printf (pp
, "[%i]: ", key
);
2263 child_rid
.print (pp
);
2265 pp_string (pp
, "}");
2268 /* Implementation of region::dump_dot_to_pp vfunc for array_region. */
2271 array_region::dump_dot_to_pp (const region_model
&model
,
2273 pretty_printer
*pp
) const
2275 region::dump_dot_to_pp (model
, this_rid
, pp
);
2276 for (map_t::iterator iter
= m_map
.begin ();
2277 iter
!= m_map
.end ();
2280 // TODO: add nodes/edges to label things
2282 int key
= (*iter
).first
;
2283 region_id child_rid
= (*iter
).second
;
2285 pp_printf (pp
, "rid_label_%i [label=\"", child_rid
.as_int ());
2286 pp_write_text_to_stream (pp
);
2287 pp_printf (pp
, "%qi", key
);
2288 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
2289 pp_string (pp
, "\"];");
2292 pp_printf (pp
, "rid_label_%i", child_rid
.as_int ());
2293 pp_string (pp
, " -> ");
2294 child_rid
.dump_node_name_to_pp (pp
);
2295 pp_string (pp
, ";");
2300 /* Implementation of region::dump_child_label vfunc for array_region. */
2303 array_region::dump_child_label (const region_model
&model
,
2305 region_id child_rid
,
2306 pretty_printer
*pp
) const
2308 region::dump_child_label (model
, this_rid
, child_rid
, pp
);
2310 for (map_t::iterator iter
= m_map
.begin ();
2311 iter
!= m_map
.end ();
2314 if (child_rid
== (*iter
).second
)
2316 int key
= (*iter
).first
;
2317 pp_printf (pp
, "[%i]: ", key
);
2322 /* Look for a child region for KEY within this array_region.
2323 If it doesn't already exist, create a child array_region, using TYPE for
2325 Return the region_id of the child (whether pre-existing, or
2329 array_region::get_or_create (region_model
*model
,
2334 region_id
*slot
= m_map
.get (key
);
2337 region_id child_rid
= model
->add_region_for_type (this_rid
, type
);
2338 m_map
.put (key
, child_rid
);
2342 /* Get the region_id for the child region for KEY within this
2343 ARRAY_REGION, or NULL if there is no such child region. */
2346 array_region::get (key_t key
)
2348 region_id
*slot
= m_map
.get (key
);
2352 /* Implementation of region::add_to_hash vfunc for array_region. */
2355 array_region::add_to_hash (inchash::hash
&hstate
) const
2357 region::add_to_hash (hstate
);
2361 /* Implementation of region::remap_region_ids vfunc for array_region. */
2364 array_region::remap_region_ids (const region_id_map
&map
)
2366 region::remap_region_ids (map
);
2368 /* Remap the region ids within the map entries. */
2369 for (map_t::iterator iter
= m_map
.begin ();
2370 iter
!= m_map
.end (); ++iter
)
2371 map
.update (&(*iter
).second
);
2374 /* Look for a child region with id CHILD_RID within this array_region.
2375 If one is found, write its key to *OUT and return true,
2376 otherwise return false. */
2379 array_region::get_key_for_child_region (region_id child_rid
, key_t
*out
) const
2381 // TODO: do we want to store an inverse map?
2382 for (map_t::iterator iter
= m_map
.begin ();
2383 iter
!= m_map
.end ();
2386 key_t key
= (*iter
).first
;
2387 region_id r
= (*iter
).second
;
2398 /* qsort comparator for array_region's keys. */
2401 array_region::key_cmp (const void *p1
, const void *p2
)
2403 key_t i1
= *(const key_t
*)p1
;
2404 key_t i2
= *(const key_t
*)p2
;
2414 /* Implementation of region::walk_for_canonicalization vfunc for
2418 array_region::walk_for_canonicalization (canonicalization
*c
) const
2420 auto_vec
<int> keys (m_map
.elements ());
2421 for (map_t::iterator iter
= m_map
.begin ();
2422 iter
!= m_map
.end ();
2425 int key_a
= (*iter
).first
;
2426 keys
.quick_push (key_a
);
2428 keys
.qsort (key_cmp
);
2432 FOR_EACH_VEC_ELT (keys
, i
, key
)
2434 region_id rid
= *const_cast<array_region
*>(this)->m_map
.get (key
);
2439 /* Convert constant CST into an array_region::key_t. */
2442 array_region::key_from_constant (tree cst
)
2444 gcc_assert (CONSTANT_CLASS_P (cst
));
2445 wide_int w
= wi::to_wide (cst
);
2446 key_t result
= w
.to_shwi ();
2450 /* class function_region : public map_region. */
2452 /* Compare the fields of this function_region with OTHER, returning true
2454 For use by region::operator==. */
2457 function_region::compare_fields (const function_region
&other
) const
2459 return map_region::compare_fields (other
);
2462 /* Implementation of region::clone vfunc for function_region. */
2465 function_region::clone () const
2467 return new function_region (*this);
2470 /* Implementation of map_region::valid_key_p vfunc for function_region. */
2473 function_region::valid_key_p (tree key
) const
2475 return TREE_CODE (key
) == LABEL_DECL
;
2478 /* class stack_region : public region. */
2480 /* stack_region's copy ctor. */
2482 stack_region::stack_region (const stack_region
&other
)
2484 m_frame_rids (other
.m_frame_rids
.length ())
2487 region_id
*frame_rid
;
2488 FOR_EACH_VEC_ELT (other
.m_frame_rids
, i
, frame_rid
)
2489 m_frame_rids
.quick_push (*frame_rid
);
2492 /* Compare the fields of this stack_region with OTHER, returning true
2494 For use by region::operator==. */
2497 stack_region::compare_fields (const stack_region
&other
) const
2499 if (m_frame_rids
.length () != other
.m_frame_rids
.length ())
2503 region_id
*frame_rid
;
2504 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2505 if (m_frame_rids
[i
] != other
.m_frame_rids
[i
])
2511 /* Implementation of region::clone vfunc for stack_region. */
2514 stack_region::clone () const
2516 return new stack_region (*this);
2519 /* Implementation of region::print_fields vfunc for stack_region. */
2522 stack_region::print_fields (const region_model
&model
,
2524 pretty_printer
*pp
) const
2526 region::print_fields (model
, this_rid
, pp
);
2530 /* Implementation of region::dump_child_label vfunc for stack_region. */
2533 stack_region::dump_child_label (const region_model
&model
,
2534 region_id this_rid ATTRIBUTE_UNUSED
,
2535 region_id child_rid
,
2536 pretty_printer
*pp
) const
2538 function
*fun
= model
.get_region
<frame_region
> (child_rid
)->get_function ();
2539 pp_printf (pp
, "frame for %qs: ", function_name (fun
));
2542 /* Push FRAME_RID (for a frame_region) onto this stack. */
2545 stack_region::push_frame (region_id frame_rid
)
2547 m_frame_rids
.safe_push (frame_rid
);
2550 /* Get the region_id of the top-most frame in this stack, if any. */
2553 stack_region::get_current_frame_id () const
2555 if (m_frame_rids
.length () > 0)
2556 return m_frame_rids
[m_frame_rids
.length () - 1];
2558 return region_id::null ();
2561 /* Pop the topmost frame_region from this stack.
2563 Purge the frame region and all its descendent regions.
2564 Convert any pointers that point into such regions into
2565 POISON_KIND_POPPED_STACK svalues.
2567 Return the ID of any return value from the frame.
2569 If PURGE, then purge all unused svalues, with the exception of any
2570 return value for the frame, which is temporarily
2571 preserved in case no regions reference it, so it can
2572 be written into a region in the caller.
2574 Accumulate stats on purged entities into STATS. */
2577 stack_region::pop_frame (region_model
*model
, bool purge
, purge_stats
*stats
,
2578 region_model_context
*ctxt
)
2580 gcc_assert (m_frame_rids
.length () > 0);
2582 region_id frame_rid
= get_current_frame_id ();
2583 frame_region
*frame
= model
->get_region
<frame_region
> (frame_rid
);
2585 /* Evaluate the result, within the callee frame. */
2586 svalue_id result_sid
;
2587 tree fndecl
= frame
->get_function ()->decl
;
2588 tree result
= DECL_RESULT (fndecl
);
2589 if (result
&& TREE_TYPE (result
) != void_type_node
)
2590 result_sid
= model
->get_rvalue (result
, ctxt
);
2592 /* Pop the frame RID. */
2593 m_frame_rids
.pop ();
2595 model
->delete_region_and_descendents (frame_rid
,
2596 POISON_KIND_POPPED_STACK
,
2598 ctxt
? ctxt
->get_logger () : NULL
);
2600 /* Delete unused svalues, but don't delete the return value. */
2602 model
->purge_unused_svalues (stats
, ctxt
, &result_sid
);
2609 /* Implementation of region::add_to_hash vfunc for stack_region. */
2612 stack_region::add_to_hash (inchash::hash
&hstate
) const
2614 region::add_to_hash (hstate
);
2617 region_id
*frame_rid
;
2618 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2619 inchash::add (*frame_rid
, hstate
);
2622 /* Implementation of region::remap_region_ids vfunc for stack_region. */
2625 stack_region::remap_region_ids (const region_id_map
&map
)
2627 region::remap_region_ids (map
);
2629 region_id
*frame_rid
;
2630 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2631 map
.update (&m_frame_rids
[i
]);
2634 /* Attempt to merge STACK_REGION_A and STACK_REGION_B using MERGER.
2635 Return true if the merger is possible, false otherwise. */
2638 stack_region::can_merge_p (const stack_region
*stack_region_a
,
2639 const stack_region
*stack_region_b
,
2640 model_merger
*merger
)
2642 if (stack_region_a
->get_num_frames ()
2643 != stack_region_b
->get_num_frames ())
2646 region_model
*merged_model
= merger
->m_merged_model
;
2648 region_id rid_merged_stack
2649 = merged_model
->get_root_region ()->ensure_stack_region (merged_model
);
2651 stack_region
*merged_stack
2652 = merged_model
->get_region
<stack_region
> (rid_merged_stack
);
2654 /* First, create all frames in the merged model, without populating them.
2655 The merging code assumes that all frames in the merged model already exist,
2656 so we have to do this first to handle the case in which a local in an
2657 older frame points at a local in a more recent frame. */
2658 for (unsigned i
= 0; i
< stack_region_a
->get_num_frames (); i
++)
2660 region_id rid_a
= stack_region_a
->get_frame_rid (i
);
2661 frame_region
*frame_a
= merger
->get_region_a
<frame_region
> (rid_a
);
2663 region_id rid_b
= stack_region_b
->get_frame_rid (i
);
2664 frame_region
*frame_b
= merger
->get_region_b
<frame_region
> (rid_b
);
2666 if (frame_a
->get_function () != frame_b
->get_function ())
2669 frame_region
*merged_frame
= new frame_region (rid_merged_stack
,
2670 frame_a
->get_function (),
2671 frame_a
->get_depth ());
2672 region_id rid_merged_frame
= merged_model
->add_region (merged_frame
);
2673 merged_stack
->push_frame (rid_merged_frame
);
2676 /* Now populate the frames we created. */
2677 for (unsigned i
= 0; i
< stack_region_a
->get_num_frames (); i
++)
2679 region_id rid_a
= stack_region_a
->get_frame_rid (i
);
2680 frame_region
*frame_a
= merger
->get_region_a
<frame_region
> (rid_a
);
2682 region_id rid_b
= stack_region_b
->get_frame_rid (i
);
2683 frame_region
*frame_b
= merger
->get_region_b
<frame_region
> (rid_b
);
2685 region_id rid_merged_frame
= merged_stack
->get_frame_rid (i
);
2686 frame_region
*merged_frame
2687 = merged_model
->get_region
<frame_region
> (rid_merged_frame
);
2688 if (!map_region::can_merge_p (frame_a
, frame_b
,
2689 merged_frame
, rid_merged_frame
,
2697 /* Implementation of region::walk_for_canonicalization vfunc for
2701 stack_region::walk_for_canonicalization (canonicalization
*c
) const
2704 region_id
*frame_rid
;
2705 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2706 c
->walk_rid (*frame_rid
);
2709 /* For debugging purposes: look for a grandchild region within one of
2710 the child frame regions, where the grandchild is for a decl named
2711 IDENTIFIER (or an SSA_NAME for such a decl):
2715 `-region for decl named IDENTIFIER
2717 returning its value, or svalue_id::null if none are found. */
2720 stack_region::get_value_by_name (tree identifier
,
2721 const region_model
&model
) const
2724 region_id
*frame_rid
;
2725 FOR_EACH_VEC_ELT (m_frame_rids
, i
, frame_rid
)
2727 frame_region
*frame
= model
.get_region
<frame_region
> (*frame_rid
);
2728 svalue_id sid
= frame
->get_value_by_name (identifier
, model
);
2733 return svalue_id::null ();
2736 /* class heap_region : public region. */
2738 /* heap_region's copy ctor. */
2740 heap_region::heap_region (const heap_region
&other
)
2745 /* Compare the fields of this heap_region with OTHER, returning true
2747 For use by region::operator==. */
2750 heap_region::compare_fields (const heap_region
&) const
2756 /* Implementation of region::clone vfunc for heap_region. */
2759 heap_region::clone () const
2761 return new heap_region (*this);
2764 /* Implementation of region::walk_for_canonicalization vfunc for
2768 heap_region::walk_for_canonicalization (canonicalization
*) const
2773 /* class root_region : public region. */
2775 /* root_region's default ctor. */
2777 root_region::root_region ()
2778 : region (region_id::null (),
2784 /* root_region's copy ctor. */
2786 root_region::root_region (const root_region
&other
)
2788 m_stack_rid (other
.m_stack_rid
),
2789 m_globals_rid (other
.m_globals_rid
),
2790 m_code_rid (other
.m_code_rid
),
2791 m_heap_rid (other
.m_heap_rid
)
2795 /* Compare the fields of this root_region with OTHER, returning true
2797 For use by region::operator==. */
2800 root_region::compare_fields (const root_region
&other
) const
2802 if (m_stack_rid
!= other
.m_stack_rid
)
2804 if (m_globals_rid
!= other
.m_globals_rid
)
2806 if (m_code_rid
!= other
.m_code_rid
)
2808 if (m_heap_rid
!= other
.m_heap_rid
)
2813 /* Implementation of region::clone vfunc for root_region. */
2816 root_region::clone () const
2818 return new root_region (*this);
2821 /* Implementation of region::print_fields vfunc for root_region. */
2824 root_region::print_fields (const region_model
&model
,
2826 pretty_printer
*pp
) const
2828 region::print_fields (model
, this_rid
, pp
);
2832 /* Implementation of region::dump_child_label vfunc for root_region. */
2835 root_region::dump_child_label (const region_model
&model ATTRIBUTE_UNUSED
,
2836 region_id this_rid ATTRIBUTE_UNUSED
,
2837 region_id child_rid
,
2838 pretty_printer
*pp
) const
2840 if (child_rid
== m_stack_rid
)
2841 pp_printf (pp
, "stack: ");
2842 else if (child_rid
== m_globals_rid
)
2843 pp_printf (pp
, "globals: ");
2844 else if (child_rid
== m_code_rid
)
2845 pp_printf (pp
, "code: ");
2846 else if (child_rid
== m_heap_rid
)
2847 pp_printf (pp
, "heap: ");
2850 /* Create a new frame_region for a call to FUN and push it onto
2853 If ARG_SIDS is non-NULL, use it to populate the parameters
2855 Otherwise, populate them with unknown values.
2857 Return the region_id of the new frame. */
2860 root_region::push_frame (region_model
*model
, function
*fun
,
2861 vec
<svalue_id
> *arg_sids
,
2862 region_model_context
*ctxt
)
2865 /* arg_sids can be NULL. */
2867 ensure_stack_region (model
);
2868 stack_region
*stack
= model
->get_region
<stack_region
> (m_stack_rid
);
2870 frame_region
*region
= new frame_region (m_stack_rid
, fun
,
2871 stack
->get_num_frames ());
2872 region_id frame_rid
= model
->add_region (region
);
2874 // TODO: unify these cases by building a vec of unknown?
2878 /* Arguments supplied from a caller frame. */
2880 tree fndecl
= fun
->decl
;
2882 for (tree iter_parm
= DECL_ARGUMENTS (fndecl
); iter_parm
;
2883 iter_parm
= DECL_CHAIN (iter_parm
), ++idx
)
2885 /* If there's a mismatching declaration, the call stmt might
2886 not have enough args. Handle this case by leaving the
2887 rest of the params as uninitialized. */
2888 if (idx
>= arg_sids
->length ())
2890 svalue_id arg_sid
= (*arg_sids
)[idx
];
2892 = region
->get_or_create (model
, frame_rid
, iter_parm
,
2893 TREE_TYPE (iter_parm
));
2894 model
->set_value (parm_rid
, arg_sid
, ctxt
);
2896 /* Also do it for default SSA name (sharing the same unknown
2898 tree parm_default_ssa
= ssa_default_def (fun
, iter_parm
);
2899 if (parm_default_ssa
)
2901 region_id defssa_rid
2902 = region
->get_or_create (model
, frame_rid
, parm_default_ssa
,
2903 TREE_TYPE (iter_parm
));
2904 model
->set_value (defssa_rid
, arg_sid
, ctxt
);
2910 /* No known arguments (a top-level call within the analysis). */
2912 /* Params have a defined, unknown value; they should not inherit
2913 from the poisoned uninit value. */
2914 tree fndecl
= fun
->decl
;
2915 for (tree iter_parm
= DECL_ARGUMENTS (fndecl
); iter_parm
;
2916 iter_parm
= DECL_CHAIN (iter_parm
))
2919 = region
->get_or_create (model
, frame_rid
, iter_parm
,
2920 TREE_TYPE (iter_parm
));
2922 = model
->set_to_new_unknown_value (parm_rid
, TREE_TYPE (iter_parm
),
2925 /* Also do it for default SSA name (sharing the same unknown
2927 tree parm_default_ssa
= ssa_default_def (fun
, iter_parm
);
2928 if (parm_default_ssa
)
2930 region_id defssa_rid
2931 = region
->get_or_create (model
, frame_rid
, parm_default_ssa
,
2932 TREE_TYPE (iter_parm
));
2933 model
->get_region (defssa_rid
)->set_value (*model
, defssa_rid
,
2939 stack
->push_frame (frame_rid
);
2944 /* Get the region_id of the top-most frame in this root_region's stack,
2948 root_region::get_current_frame_id (const region_model
&model
) const
2950 stack_region
*stack
= model
.get_region
<stack_region
> (m_stack_rid
);
2952 return stack
->get_current_frame_id ();
2954 return region_id::null ();
2957 /* Pop the topmost frame_region from this root_region's stack;
2958 see the comment for stack_region::pop_frame. */
2961 root_region::pop_frame (region_model
*model
, bool purge
, purge_stats
*out
,
2962 region_model_context
*ctxt
)
2964 stack_region
*stack
= model
->get_region
<stack_region
> (m_stack_rid
);
2965 return stack
->pop_frame (model
, purge
, out
, ctxt
);
2968 /* Return the region_id of the stack region, creating it if doesn't
2972 root_region::ensure_stack_region (region_model
*model
)
2974 if (m_stack_rid
.null_p ())
2976 svalue_id uninit_sid
2977 = model
->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT
,
2980 = model
->add_region (new stack_region (model
->get_root_rid (),
2986 /* Return the stack region (which could be NULL). */
2989 root_region::get_stack_region (const region_model
*model
) const
2991 return model
->get_region
<stack_region
> (m_stack_rid
);
2994 /* Return the region_id of the globals region, creating it if doesn't
2998 root_region::ensure_globals_region (region_model
*model
)
3000 if (m_globals_rid
.null_p ())
3002 = model
->add_region (new globals_region (model
->get_root_rid ()));
3003 return m_globals_rid
;
3006 /* Return the code region (which could be NULL). */
3009 root_region::get_code_region (const region_model
*model
) const
3011 return model
->get_region
<code_region
> (m_code_rid
);
3014 /* Return the region_id of the code region, creating it if doesn't
3018 root_region::ensure_code_region (region_model
*model
)
3020 if (m_code_rid
.null_p ())
3022 = model
->add_region (new code_region (model
->get_root_rid ()));
3026 /* Return the globals region (which could be NULL). */
3029 root_region::get_globals_region (const region_model
*model
) const
3031 return model
->get_region
<globals_region
> (m_globals_rid
);
3034 /* Return the region_id of the heap region, creating it if doesn't
3038 root_region::ensure_heap_region (region_model
*model
)
3040 if (m_heap_rid
.null_p ())
3042 svalue_id uninit_sid
3043 = model
->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT
,
3046 = model
->add_region (new heap_region (model
->get_root_rid (),
3052 /* Return the heap region (which could be NULL). */
3055 root_region::get_heap_region (const region_model
*model
) const
3057 return model
->get_region
<heap_region
> (m_heap_rid
);
3060 /* Implementation of region::remap_region_ids vfunc for root_region. */
3063 root_region::remap_region_ids (const region_id_map
&map
)
3065 map
.update (&m_stack_rid
);
3066 map
.update (&m_globals_rid
);
3067 map
.update (&m_code_rid
);
3068 map
.update (&m_heap_rid
);
3071 /* Attempt to merge ROOT_REGION_A and ROOT_REGION_B into
3072 MERGED_ROOT_REGION using MERGER.
3073 Return true if the merger is possible, false otherwise. */
3076 root_region::can_merge_p (const root_region
*root_region_a
,
3077 const root_region
*root_region_b
,
3078 root_region
*merged_root_region
,
3079 model_merger
*merger
)
3081 /* We can only merge if the stacks are sufficiently similar. */
3082 stack_region
*stack_a
= root_region_a
->get_stack_region (merger
->m_model_a
);
3083 stack_region
*stack_b
= root_region_b
->get_stack_region (merger
->m_model_b
);
3084 if (stack_a
&& stack_b
)
3086 /* If the two models both have a stack, attempt to merge them. */
3087 merged_root_region
->ensure_stack_region (merger
->m_merged_model
);
3088 if (!stack_region::can_merge_p (stack_a
, stack_b
, merger
))
3091 else if (stack_a
|| stack_b
)
3092 /* Don't attempt to merge if one model has a stack and the other
3096 map_region
*globals_a
= root_region_a
->get_globals_region (merger
->m_model_a
);
3097 map_region
*globals_b
= root_region_b
->get_globals_region (merger
->m_model_b
);
3098 if (globals_a
&& globals_b
)
3100 /* If both models have globals regions, attempt to merge them. */
3101 region_id merged_globals_rid
3102 = merged_root_region
->ensure_globals_region (merger
->m_merged_model
);
3103 map_region
*merged_globals
3104 = merged_root_region
->get_globals_region (merger
->m_merged_model
);
3105 if (!map_region::can_merge_p (globals_a
, globals_b
,
3106 merged_globals
, merged_globals_rid
,
3110 /* otherwise, merge as "no globals". */
3112 map_region
*code_a
= root_region_a
->get_code_region (merger
->m_model_a
);
3113 map_region
*code_b
= root_region_b
->get_code_region (merger
->m_model_b
);
3114 if (code_a
&& code_b
)
3116 /* If both models have code regions, attempt to merge them. */
3117 region_id merged_code_rid
3118 = merged_root_region
->ensure_code_region (merger
->m_merged_model
);
3119 map_region
*merged_code
3120 = merged_root_region
->get_code_region (merger
->m_merged_model
);
3121 if (!map_region::can_merge_p (code_a
, code_b
,
3122 merged_code
, merged_code_rid
,
3126 /* otherwise, merge as "no code". */
3128 heap_region
*heap_a
= root_region_a
->get_heap_region (merger
->m_model_a
);
3129 heap_region
*heap_b
= root_region_b
->get_heap_region (merger
->m_model_b
);
3130 if (heap_a
&& heap_b
)
3132 /* If both have a heap, create a "merged" heap.
3133 Actually merging the heap contents happens via the region_svalue
3134 instances, as needed, when seeing pairs of region_svalue instances. */
3135 merged_root_region
->ensure_heap_region (merger
->m_merged_model
);
3137 /* otherwise, merge as "no heap". */
3142 /* Implementation of region::add_to_hash vfunc for root_region. */
3145 root_region::add_to_hash (inchash::hash
&hstate
) const
3147 region::add_to_hash (hstate
);
3148 inchash::add (m_stack_rid
, hstate
);
3149 inchash::add (m_globals_rid
, hstate
);
3150 inchash::add (m_code_rid
, hstate
);
3151 inchash::add (m_heap_rid
, hstate
);
3154 /* Implementation of region::walk_for_canonicalization vfunc for
3158 root_region::walk_for_canonicalization (canonicalization
*c
) const
3160 c
->walk_rid (m_stack_rid
);
3161 c
->walk_rid (m_globals_rid
);
3162 c
->walk_rid (m_code_rid
);
3163 c
->walk_rid (m_heap_rid
);
3166 /* For debugging purposes: look for a descendant region for a local
3167 or global decl named IDENTIFIER (or an SSA_NAME for such a decl),
3168 returning its value, or svalue_id::null if none are found. */
3171 root_region::get_value_by_name (tree identifier
,
3172 const region_model
&model
) const
3174 if (stack_region
*stack
= get_stack_region (&model
))
3176 svalue_id sid
= stack
->get_value_by_name (identifier
, model
);
3180 if (map_region
*globals
= get_globals_region (&model
))
3182 svalue_id sid
= globals
->get_value_by_name (identifier
, model
);
3186 return svalue_id::null ();
3189 /* class symbolic_region : public map_region. */
3191 /* symbolic_region's copy ctor. */
3193 symbolic_region::symbolic_region (const symbolic_region
&other
)
3195 m_possibly_null (other
.m_possibly_null
)
3199 /* Compare the fields of this symbolic_region with OTHER, returning true
3201 For use by region::operator==. */
3204 symbolic_region::compare_fields (const symbolic_region
&other
) const
3206 return m_possibly_null
== other
.m_possibly_null
;
3209 /* Implementation of region::clone vfunc for symbolic_region. */
3212 symbolic_region::clone () const
3214 return new symbolic_region (*this);
3217 /* Implementation of region::walk_for_canonicalization vfunc for
3221 symbolic_region::walk_for_canonicalization (canonicalization
*) const
3226 /* class region_model. */
3228 /* region_model's default ctor. */
3230 region_model::region_model ()
3232 m_root_rid
= add_region (new root_region ());
3233 m_constraints
= new impl_constraint_manager (this);
3237 /* region_model's copy ctor. */
3239 region_model::region_model (const region_model
&other
)
3240 : m_svalues (other
.m_svalues
.length ()),
3241 m_regions (other
.m_regions
.length ()),
3242 m_root_rid (other
.m_root_rid
)
3244 /* Clone the svalues and regions. */
3248 FOR_EACH_VEC_ELT (other
.m_svalues
, i
, svalue
)
3249 m_svalues
.quick_push (svalue
->clone ());
3252 FOR_EACH_VEC_ELT (other
.m_regions
, i
, region
)
3253 m_regions
.quick_push (region
->clone ());
3255 m_constraints
= other
.m_constraints
->clone (this);
3258 /* region_model's dtor. */
3260 region_model::~region_model ()
3262 delete m_constraints
;
3265 /* region_model's assignment operator. */
3268 region_model::operator= (const region_model
&other
)
3274 /* Delete existing content. */
3275 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3277 m_svalues
.truncate (0);
3279 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3281 m_regions
.truncate (0);
3283 delete m_constraints
;
3285 /* Clone the svalues and regions. */
3286 m_svalues
.reserve (other
.m_svalues
.length (), true);
3287 FOR_EACH_VEC_ELT (other
.m_svalues
, i
, svalue
)
3288 m_svalues
.quick_push (svalue
->clone ());
3290 m_regions
.reserve (other
.m_regions
.length (), true);
3291 FOR_EACH_VEC_ELT (other
.m_regions
, i
, region
)
3292 m_regions
.quick_push (region
->clone ());
3294 m_root_rid
= other
.m_root_rid
;
3296 m_constraints
= other
.m_constraints
->clone (this);
3301 /* Equality operator for region_model.
3303 Amongst other things this directly compares the svalue and region
3304 vectors and so for this to be meaningful both this and OTHER should
3305 have been canonicalized. */
3308 region_model::operator== (const region_model
&other
) const
3310 if (m_root_rid
!= other
.m_root_rid
)
3313 if (m_svalues
.length () != other
.m_svalues
.length ())
3316 if (m_regions
.length () != other
.m_regions
.length ())
3319 if (*m_constraints
!= *other
.m_constraints
)
3324 FOR_EACH_VEC_ELT (other
.m_svalues
, i
, svalue
)
3325 if (!(*m_svalues
[i
] == *other
.m_svalues
[i
]))
3329 FOR_EACH_VEC_ELT (other
.m_regions
, i
, region
)
3330 if (!(*m_regions
[i
] == *other
.m_regions
[i
]))
3333 gcc_checking_assert (hash () == other
.hash ());
3338 /* Generate a hash value for this region_model. */
3341 region_model::hash () const
3343 hashval_t result
= 0;
3347 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3348 result
^= svalue
->hash ();
3351 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3352 result
^= region
->hash ();
3354 result
^= m_constraints
->hash ();
3359 /* Print an all-on-one-line representation of this region_model to PP,
3360 which must support %E for trees. */
3363 region_model::print (pretty_printer
*pp
) const
3367 pp_string (pp
, "svalues: [");
3369 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3372 pp_string (pp
, ", ");
3373 print_svalue (svalue_id::from_int (i
), pp
);
3376 pp_string (pp
, "], regions: [");
3379 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3382 pp_string (pp
, ", ");
3383 region
->print (*this, region_id::from_int (i
), pp
);
3386 pp_string (pp
, "], constraints: ");
3388 m_constraints
->print (pp
);
3391 /* Print the svalue with id SID to PP. */
3394 region_model::print_svalue (svalue_id sid
, pretty_printer
*pp
) const
3396 get_svalue (sid
)->print (*this, sid
, pp
);
3399 /* Dump a .dot representation of this region_model to PP, showing
3400 the values and the hierarchy of regions. */
3403 region_model::dump_dot_to_pp (pretty_printer
*pp
) const
3405 graphviz_out
gv (pp
);
3407 pp_string (pp
, "digraph \"");
3408 pp_write_text_to_stream (pp
);
3409 pp_write_text_as_dot_label_to_stream (pp
, /*for_record=*/false);
3410 pp_string (pp
, "\" {\n");
3414 pp_string (pp
, "overlap=false;\n");
3415 pp_string (pp
, "compound=true;\n");
3420 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3421 svalue
->dump_dot_to_pp (*this, svalue_id::from_int (i
), pp
);
3424 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3425 region
->dump_dot_to_pp (*this, region_id::from_int (i
), pp
);
3427 /* TODO: constraints. */
3429 /* Terminate "digraph" */
3431 pp_string (pp
, "}");
3435 /* Dump a .dot representation of this region_model to FP. */
3438 region_model::dump_dot_to_file (FILE *fp
) const
3441 pp_format_decoder (&pp
) = default_tree_printer
;
3442 pp
.buffer
->stream
= fp
;
3443 dump_dot_to_pp (&pp
);
3447 /* Dump a .dot representation of this region_model to PATH. */
3450 region_model::dump_dot (const char *path
) const
3452 FILE *fp
= fopen (path
, "w");
3453 dump_dot_to_file (fp
);
3457 /* Dump a multiline representation of this model to PP, showing the
3458 region hierarchy, the svalues, and any constraints.
3460 If SUMMARIZE is true, show only the most pertient information,
3461 in a form that attempts to be less verbose.
3462 Otherwise, show all information. */
3465 region_model::dump_to_pp (pretty_printer
*pp
, bool summarize
) const
3469 bool is_first
= true;
3470 region_id frame_id
= get_current_frame_id ();
3471 frame_region
*frame
= get_region
<frame_region
> (frame_id
);
3473 dump_summary_of_map (pp
, frame
, &is_first
);
3475 region_id globals_id
= get_globals_region_id ();
3476 map_region
*globals
= get_region
<map_region
> (globals_id
);
3478 dump_summary_of_map (pp
, globals
, &is_first
);
3483 FOR_EACH_VEC_ELT (m_constraints
->m_equiv_classes
, i
, ec
)
3485 for (unsigned j
= 0; j
< ec
->m_vars
.length (); j
++)
3487 svalue_id lhs_sid
= ec
->m_vars
[j
];
3488 tree lhs_tree
= get_representative_tree (lhs_sid
);
3489 if (lhs_tree
== NULL_TREE
)
3491 for (unsigned k
= j
+ 1; k
< ec
->m_vars
.length (); k
++)
3493 svalue_id rhs_sid
= ec
->m_vars
[k
];
3494 tree rhs_tree
= get_representative_tree (rhs_sid
);
3496 && !(CONSTANT_CLASS_P (lhs_tree
)
3497 && CONSTANT_CLASS_P (rhs_tree
)))
3499 dump_separator (pp
, &is_first
);
3500 dump_tree (pp
, lhs_tree
);
3501 pp_string (pp
, " == ");
3502 dump_tree (pp
, rhs_tree
);
3509 FOR_EACH_VEC_ELT (m_constraints
->m_constraints
, i
, c
)
3511 const equiv_class
&lhs
= c
->m_lhs
.get_obj (*m_constraints
);
3512 const equiv_class
&rhs
= c
->m_rhs
.get_obj (*m_constraints
);
3513 svalue_id lhs_sid
= lhs
.get_representative ();
3514 svalue_id rhs_sid
= rhs
.get_representative ();
3515 tree lhs_tree
= get_representative_tree (lhs_sid
);
3516 tree rhs_tree
= get_representative_tree (rhs_sid
);
3517 if (lhs_tree
&& rhs_tree
3518 && !(CONSTANT_CLASS_P (lhs_tree
) && CONSTANT_CLASS_P (rhs_tree
)))
3520 dump_separator (pp
, &is_first
);
3521 dump_tree (pp
, lhs_tree
);
3522 pp_printf (pp
, " %s ", constraint_op_code (c
->m_op
));
3523 dump_tree (pp
, rhs_tree
);
3530 get_region (m_root_rid
)->dump_to_pp (*this, m_root_rid
, pp
, "", true);
3532 pp_string (pp
, "svalues:");
3536 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
3538 pp_string (pp
, " ");
3539 svalue_id sid
= svalue_id::from_int (i
);
3540 print_svalue (sid
, pp
);
3544 pp_string (pp
, "constraint manager:");
3546 m_constraints
->dump_to_pp (pp
);
3549 /* Dump a multiline representation of this model to FILE. */
3552 region_model::dump (FILE *fp
, bool summarize
) const
3555 pp_format_decoder (&pp
) = default_tree_printer
;
3556 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
3557 pp
.buffer
->stream
= fp
;
3558 dump_to_pp (&pp
, summarize
);
3562 /* Dump a multiline representation of this model to stderr. */
3565 region_model::dump (bool summarize
) const
3567 dump (stderr
, summarize
);
3570 /* Dump RMODEL fully to stderr (i.e. without summarization). */
3573 region_model::debug () const
3578 /* Dump VEC to PP, in the form "{VEC elements}: LABEL". */
3581 dump_vec_of_tree (pretty_printer
*pp
,
3583 const auto_vec
<tree
> &vec
,
3586 if (vec
.length () == 0)
3589 dump_separator (pp
, is_first
);
3590 pp_printf (pp
, "{");
3593 FOR_EACH_VEC_ELT (vec
, i
, key
)
3596 pp_string (pp
, ", ");
3597 dump_tree (pp
, key
);
3599 pp_printf (pp
, "}: %s", label
);
3602 /* Dump *MAP_REGION to PP in compact form, updating *IS_FIRST.
3603 Subroutine of region_model::dump_to_pp for use on stack frames and for
3604 the "globals" region. */
3607 region_model::dump_summary_of_map (pretty_printer
*pp
,
3608 map_region
*map_region
,
3609 bool *is_first
) const
3611 /* Get the keys, sorted by tree_cmp. In particular, this ought
3612 to alphabetize any decls. */
3613 auto_vec
<tree
> keys (map_region
->elements ());
3614 for (map_region::iterator_t iter
= map_region
->begin ();
3615 iter
!= map_region
->end ();
3618 tree key_a
= (*iter
).first
;
3619 keys
.quick_push (key_a
);
3621 keys
.qsort (tree_cmp
);
3623 /* Print pointers, constants, and poisoned values that aren't "uninit";
3624 gather keys for unknown and uninit values. */
3627 auto_vec
<tree
> unknown_keys
;
3628 auto_vec
<tree
> uninit_keys
;
3629 FOR_EACH_VEC_ELT (keys
, i
, key
)
3631 region_id child_rid
= *map_region
->get (key
);
3633 region
*child_region
= get_region (child_rid
);
3636 svalue_id sid
= child_region
->get_value_direct ();
3639 svalue
*sval
= get_svalue (sid
);
3640 switch (sval
->get_kind ())
3646 region_svalue
*region_sval
= as_a
<region_svalue
*> (sval
);
3647 region_id pointee_rid
= region_sval
->get_pointee ();
3648 tree pointee
= get_representative_path_var (pointee_rid
).m_tree
;
3649 dump_separator (pp
, is_first
);
3650 dump_tree (pp
, key
);
3651 pp_string (pp
, ": ");
3654 pp_character (pp
, '&');
3655 dump_tree (pp
, pointee
);
3658 pp_string (pp
, "NULL");
3662 dump_separator (pp
, is_first
);
3663 dump_tree (pp
, key
);
3664 pp_string (pp
, ": ");
3665 dump_tree (pp
, sval
->dyn_cast_constant_svalue ()->get_constant ());
3668 unknown_keys
.safe_push (key
);
3672 poisoned_svalue
*poisoned_sval
= as_a
<poisoned_svalue
*> (sval
);
3673 enum poison_kind pkind
= poisoned_sval
->get_poison_kind ();
3674 if (pkind
== POISON_KIND_UNINIT
)
3675 uninit_keys
.safe_push (key
);
3678 dump_separator (pp
, is_first
);
3679 dump_tree (pp
, key
);
3680 pp_printf (pp
, ": %s", poison_kind_to_str (pkind
));
3685 dump_separator (pp
, is_first
);
3686 pp_printf (pp
, "setjmp: EN: %i",
3687 sval
->dyn_cast_setjmp_svalue ()->get_enode_index ());
3692 /* Print unknown and uninitialized values in consolidated form. */
3693 dump_vec_of_tree (pp
, is_first
, unknown_keys
, "unknown");
3694 dump_vec_of_tree (pp
, is_first
, uninit_keys
, "uninit");
3697 /* Assert that this object is valid. */
3700 region_model::validate () const
3702 /* Skip this in a release build. */
3707 m_constraints
->validate ();
3711 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
3714 // TODO: anything else?
3716 /* Verify that the stack region (if any) has an "uninitialized" value. */
3717 region
*stack_region
= get_root_region ()->get_stack_region (this);
3720 svalue_id stack_value_sid
= stack_region
->get_value_direct ();
3721 svalue
*stack_value
= get_svalue (stack_value_sid
);
3722 gcc_assert (stack_value
->get_kind () == SK_POISONED
);
3723 poisoned_svalue
*subclass
= stack_value
->dyn_cast_poisoned_svalue ();
3724 gcc_assert (subclass
);
3725 gcc_assert (subclass
->get_poison_kind () == POISON_KIND_UNINIT
);
3729 /* Global data for use by svalue_id_cmp_by_constant_svalue. */
3731 static region_model
*svalue_id_cmp_by_constant_svalue_model
= NULL
;
3733 /* Comparator for use by region_model::canonicalize. */
3736 svalue_id_cmp_by_constant_svalue (const void *p1
, const void *p2
)
3738 const svalue_id
*sid1
= (const svalue_id
*)p1
;
3739 const svalue_id
*sid2
= (const svalue_id
*)p2
;
3740 gcc_assert (!sid1
->null_p ());
3741 gcc_assert (!sid2
->null_p ());
3742 gcc_assert (svalue_id_cmp_by_constant_svalue_model
);
3744 = *svalue_id_cmp_by_constant_svalue_model
->get_svalue (*sid1
);
3746 = *svalue_id_cmp_by_constant_svalue_model
->get_svalue (*sid2
);
3747 gcc_assert (sval1
.get_kind () == SK_CONSTANT
);
3748 gcc_assert (sval2
.get_kind () == SK_CONSTANT
);
3750 tree cst1
= ((const constant_svalue
&)sval1
).get_constant ();
3751 tree cst2
= ((const constant_svalue
&)sval2
).get_constant ();
3752 return tree_cmp (cst1
, cst2
);
3755 /* Reorder the regions and svalues into a deterministic "canonical" order,
3756 to maximize the chance of equality.
3757 If non-NULL, notify CTXT about the svalue id remapping. */
3760 region_model::canonicalize (region_model_context
*ctxt
)
3762 /* Walk all regions and values in a deterministic order, visiting
3763 rids and sids, generating a rid and sid map. */
3764 canonicalization
c (*this);
3766 /* (1): Walk all svalues, putting constants first, sorting the constants
3767 (thus imposing an ordering on any constants that are purely referenced
3769 Ignore other svalues for now. */
3772 auto_vec
<svalue_id
> sids
;
3774 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
3776 if (sval
->get_kind () == SK_CONSTANT
)
3777 sids
.safe_push (svalue_id::from_int (i
));
3779 svalue_id_cmp_by_constant_svalue_model
= this;
3780 sids
.qsort (svalue_id_cmp_by_constant_svalue
);
3781 svalue_id_cmp_by_constant_svalue_model
= NULL
;
3783 FOR_EACH_VEC_ELT (sids
, i
, sid
)
3787 /* (2): Walk all regions (and thus their values) in a deterministic
3789 c
.walk_rid (m_root_rid
);
3791 /* (3): Ensure we've visited everything, as we don't want to purge
3792 at this stage. Anything we visit for the first time here has
3797 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
3798 c
.walk_rid (region_id::from_int (i
));
3800 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
3801 c
.walk_sid (svalue_id::from_int (i
));
3804 /* (4): We now have a reordering of the regions and values.
3806 remap_svalue_ids (c
.m_sid_map
);
3807 remap_region_ids (c
.m_rid_map
);
3809 ctxt
->remap_svalue_ids (c
.m_sid_map
);
3811 /* (5): Canonicalize the constraint_manager (it has already had its
3812 svalue_ids remapped above). This makes use of the new svalue_id
3813 values, and so must happen last. */
3814 m_constraints
->canonicalize (get_num_svalues ());
3819 /* Return true if this region_model is in canonical form. */
3822 region_model::canonicalized_p () const
3824 region_model
copy (*this);
3825 copy
.canonicalize (NULL
);
3826 return *this == copy
;
3829 /* A subclass of pending_diagnostic for complaining about uses of
3832 class poisoned_value_diagnostic
3833 : public pending_diagnostic_subclass
<poisoned_value_diagnostic
>
3836 poisoned_value_diagnostic (tree expr
, enum poison_kind pkind
)
3837 : m_expr (expr
), m_pkind (pkind
)
3840 const char *get_kind () const FINAL OVERRIDE
{ return "poisoned_value_diagnostic"; }
3842 bool operator== (const poisoned_value_diagnostic
&other
) const
3844 return m_expr
== other
.m_expr
;
3847 bool emit (rich_location
*rich_loc
) FINAL OVERRIDE
3853 case POISON_KIND_UNINIT
:
3855 diagnostic_metadata m
;
3856 m
.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
3857 return warning_meta (rich_loc
, m
,
3858 OPT_Wanalyzer_use_of_uninitialized_value
,
3859 "use of uninitialized value %qE",
3863 case POISON_KIND_FREED
:
3865 diagnostic_metadata m
;
3866 m
.add_cwe (416); /* "CWE-416: Use After Free". */
3867 return warning_meta (rich_loc
, m
,
3868 OPT_Wanalyzer_use_after_free
,
3869 "use after %<free%> of %qE",
3873 case POISON_KIND_POPPED_STACK
:
3875 /* TODO: which CWE? */
3876 return warning_at (rich_loc
,
3877 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame
,
3878 "use of pointer %qE within stale stack frame",
3885 label_text
describe_final_event (const evdesc::final_event
&ev
) FINAL OVERRIDE
3891 case POISON_KIND_UNINIT
:
3892 return ev
.formatted_print ("use of uninitialized value %qE here",
3894 case POISON_KIND_FREED
:
3895 return ev
.formatted_print ("use after %<free%> of %qE here",
3897 case POISON_KIND_POPPED_STACK
:
3898 return ev
.formatted_print
3899 ("use of pointer %qE within stale stack frame here",
3906 enum poison_kind m_pkind
;
3909 /* Determine if EXPR is poisoned, and if so, queue a diagnostic to CTXT. */
3912 region_model::check_for_poison (tree expr
, region_model_context
*ctxt
)
3917 // TODO: this is disabled for now (too many false positives)
3920 svalue_id expr_sid
= get_rvalue (expr
, ctxt
);
3921 gcc_assert (!expr_sid
.null_p ());
3922 svalue
*expr_svalue
= get_svalue (expr_sid
);
3923 gcc_assert (expr_svalue
);
3924 if (const poisoned_svalue
*poisoned_sval
3925 = expr_svalue
->dyn_cast_poisoned_svalue ())
3927 enum poison_kind pkind
= poisoned_sval
->get_poison_kind ();
3928 ctxt
->warn (new poisoned_value_diagnostic (expr
, pkind
));
3932 /* Update this model for the ASSIGN stmt, using CTXT to report any
3936 region_model::on_assignment (const gassign
*assign
, region_model_context
*ctxt
)
3938 tree lhs
= gimple_assign_lhs (assign
);
3939 tree rhs1
= gimple_assign_rhs1 (assign
);
3941 region_id lhs_rid
= get_lvalue (lhs
, ctxt
);
3943 /* Check for uses of poisoned values. */
3944 switch (get_gimple_rhs_class (gimple_expr_code (assign
)))
3946 case GIMPLE_INVALID_RHS
:
3949 case GIMPLE_TERNARY_RHS
:
3950 check_for_poison (gimple_assign_rhs3 (assign
), ctxt
);
3952 case GIMPLE_BINARY_RHS
:
3953 check_for_poison (gimple_assign_rhs2 (assign
), ctxt
);
3955 case GIMPLE_UNARY_RHS
:
3956 case GIMPLE_SINGLE_RHS
:
3957 check_for_poison (gimple_assign_rhs1 (assign
), ctxt
);
3960 if (lhs_rid
.null_p ())
3962 // TODO: issue a warning for this case
3964 enum tree_code op
= gimple_assign_rhs_code (assign
);
3970 sorry_at (assign
->location
, "unhandled assignment op: %qs",
3971 get_tree_code_name (op
));
3972 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
3984 /* e.g. "x ={v} {CLOBBER};" */
3989 case POINTER_PLUS_EXPR
:
3991 /* e.g. "_1 = a_10(D) + 12;" */
3993 tree offset
= gimple_assign_rhs2 (assign
);
3995 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
3996 svalue_id offset_sid
= get_rvalue (offset
, ctxt
);
3997 region_id element_rid
3998 = get_or_create_pointer_plus_expr (TREE_TYPE (TREE_TYPE (ptr
)),
3999 ptr_sid
, offset_sid
,
4001 svalue_id element_ptr_sid
4002 = get_or_create_ptr_svalue (TREE_TYPE (ptr
), element_rid
);
4003 set_value (lhs_rid
, element_ptr_sid
, ctxt
);
4007 case POINTER_DIFF_EXPR
:
4009 /* e.g. "_1 = p_2(D) - q_3(D);". */
4013 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4020 svalue_id ptr_sid
= get_rvalue (rhs1
, ctxt
);
4021 set_value (lhs_rid
, ptr_sid
, ctxt
);
4027 region_id rhs_rid
= get_lvalue (rhs1
, ctxt
);
4029 = get_region (rhs_rid
)->get_value (*this, true, ctxt
);
4030 set_value (lhs_rid
, rhs_sid
, ctxt
);
4039 svalue_id cst_sid
= get_rvalue (rhs1
, ctxt
);
4040 set_value (lhs_rid
, cst_sid
, ctxt
);
4044 case FIX_TRUNC_EXPR
:
4048 // fall though for now
4054 svalue_id var_sid
= get_rvalue (rhs1
, ctxt
);
4055 set_value (lhs_rid
, var_sid
, ctxt
);
4066 tree rhs2
= gimple_assign_rhs2 (assign
);
4068 // TODO: constraints between svalues
4069 svalue_id rhs1_sid
= get_rvalue (rhs1
, ctxt
);
4070 svalue_id rhs2_sid
= get_rvalue (rhs2
, ctxt
);
4072 tristate t
= eval_condition (rhs1_sid
, op
, rhs2_sid
);
4075 get_rvalue (t
.is_true ()
4077 : boolean_false_node
,
4081 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4092 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4099 case TRUNC_DIV_EXPR
:
4100 case TRUNC_MOD_EXPR
:
4110 tree rhs2
= gimple_assign_rhs2 (assign
);
4112 svalue_id rhs1_sid
= get_rvalue (rhs1
, ctxt
);
4113 svalue_id rhs2_sid
= get_rvalue (rhs2
, ctxt
);
4115 if (tree rhs1_cst
= maybe_get_constant (rhs1_sid
))
4116 if (tree rhs2_cst
= maybe_get_constant (rhs2_sid
))
4118 tree result
= fold_binary (op
, TREE_TYPE (lhs
),
4119 rhs1_cst
, rhs2_cst
);
4120 if (result
&& CONSTANT_CLASS_P (result
))
4122 svalue_id result_sid
4123 = get_or_create_constant_svalue (result
);
4124 set_value (lhs_rid
, result_sid
, ctxt
);
4128 set_to_new_unknown_value (lhs_rid
, TREE_TYPE (lhs
), ctxt
);
4134 /* LHS = op0.op1; */
4135 region_id child_rid
= get_lvalue (rhs1
, ctxt
);
4137 = get_region (child_rid
)->get_value (*this, true, ctxt
);
4138 set_value (lhs_rid
, child_sid
, ctxt
);
4144 /* Update this model for the CALL stmt, using CTXT to report any
4145 diagnostics - the first half.
4147 Updates to the region_model that should be made *before* sm-states
4148 are updated are done here; other updates to the region_model are done
4149 in region_model::on_call_post.
4151 Return true if the function call has unknown side effects (it wasn't
4152 recognized and we don't have a body for it, or are unable to tell which
/* NOTE(review): this capture is fragmentary - braces and some
   declarations (e.g. the lhs_rid region_id) are not visible here;
   the comments below annotate only the visible statements.  */
4156 region_model::on_call_pre (const gcall
*call
, region_model_context
*ctxt
)
/* Record the type (and, below, the region) of the call's LHS, if any,
   so recognized callees can write a result into it.  */
4159 tree lhs_type
= NULL_TREE
;
4160 if (tree lhs
= gimple_call_lhs (call
))
4162 lhs_rid
= get_lvalue (lhs
, ctxt
);
4163 lhs_type
= TREE_TYPE (lhs
);
4166 /* Check for uses of poisoned values.
4167 For now, special-case "free", to avoid warning about "use-after-free"
4168 when "double free" would be more precise. */
4169 if (!is_special_named_call_p (call
, "free", 1))
4170 for (unsigned i
= 0; i
< gimple_call_num_args (call
); i
++)
4171 check_for_poison (gimple_call_arg (call
, i
), ctxt
);
/* Assume no unknown side effects until we fail to recognize the callee.  */
4173 bool unknown_side_effects
= false;
/* Special-case handling for specific known callees, by name.  */
4175 if (tree callee_fndecl
= get_fndecl_for_call (call
, ctxt
))
/* "malloc": create a fresh heap region and point the LHS at it.  */
4177 if (is_named_call_p (callee_fndecl
, "malloc", call
, 1))
4179 // TODO: capture size as a svalue?
4180 region_id new_rid
= add_new_malloc_region ();
4181 if (!lhs_rid
.null_p ())
4184 = get_or_create_ptr_svalue (lhs_type
, new_rid
);
4185 set_value (lhs_rid
, ptr_sid
, ctxt
);
/* "__builtin_alloca": a symbolic region within the current frame.  */
4189 else if (is_named_call_p (callee_fndecl
, "__builtin_alloca", call
, 1))
4191 region_id frame_rid
= get_current_frame_id ();
4193 = add_region (new symbolic_region (frame_rid
, NULL_TREE
, false));
4194 if (!lhs_rid
.null_p ())
4197 = get_or_create_ptr_svalue (lhs_type
, new_rid
);
4198 set_value (lhs_rid
, ptr_sid
, ctxt
);
/* "strlen": fold to a constant when the buffer holds a STRING_CST.  */
4202 else if (is_named_call_p (callee_fndecl
, "strlen", call
, 1))
4204 region_id buf_rid
= deref_rvalue (gimple_call_arg (call
, 0), ctxt
);
4206 = get_region (buf_rid
)->get_value (*this, true, ctxt
);
4207 if (tree cst_expr
= maybe_get_constant (buf_sid
))
4209 if (TREE_CODE (cst_expr
) == STRING_CST
4210 && !lhs_rid
.null_p ())
4212 /* TREE_STRING_LENGTH is sizeof, not strlen. */
4213 int sizeof_cst
= TREE_STRING_LENGTH (cst_expr
);
4214 int strlen_cst
= sizeof_cst
- 1;
4215 tree t_cst
= build_int_cst (lhs_type
, strlen_cst
);
4216 svalue_id result_sid
4217 = get_or_create_constant_svalue (t_cst
);
4218 set_value (lhs_rid
, result_sid
, ctxt
);
4222 /* Otherwise an unknown value. */
4224 else if (is_named_call_p (callee_fndecl
,
4225 "__analyzer_dump_num_heap_regions", call
, 0))
4227 /* Handle the builtin "__analyzer_dump_num_heap_regions" by emitting
4228 a warning (for use in DejaGnu tests). */
4229 int num_heap_regions
= 0;
4230 region_id heap_rid
= get_root_region ()->ensure_heap_region (this);
/* Count the regions whose parent is the heap region (the increment
   statement is not visible in this capture).  */
4233 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
4234 if (region
->get_parent () == heap_rid
)
4236 /* Use quotes to ensure the output isn't truncated. */
4237 warning_at (call
->location
, 0,
4238 "num heap regions: %qi", num_heap_regions
);
/* An unrecognized fndecl: it has unknown side effects unless it is
   pure or we have gimple for its body.  */
4241 else if (!fndecl_has_gimple_body_p (callee_fndecl
)
4242 && !DECL_PURE_P (callee_fndecl
))
4243 unknown_side_effects
= true;
/* No fndecl at all for the call: assume unknown side effects.  */
4246 unknown_side_effects
= true;
4248 /* Unknown return value. */
4249 if (!lhs_rid
.null_p ())
4250 set_to_new_unknown_value (lhs_rid
, lhs_type
, ctxt
);
4252 return unknown_side_effects
;
4255 /* Update this model for the CALL stmt, using CTXT to report any
4256 diagnostics - the second half.
4258 Updates to the region_model that should be made *after* sm-states
4259 are updated are done here; other updates to the region_model are done
4260 in region_model::on_call_pre.
4262 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
/* NOTE(review): fragmentary capture - braces and a few lines (e.g. the
   purge_stats declaration) are not visible here.  */
4266 region_model::on_call_post (const gcall
*call
,
4267 bool unknown_side_effects
,
4268 region_model_context
*ctxt
)
4270 /* Update for "free" here, after sm-handling.
4272 If the ptr points to an underlying heap region, delete the region,
4273 poisoning pointers to it and regions within it.
4275 We delay this until after sm-state has been updated so that the
4276 sm-handling can transition all of the various casts of the pointer
4277 to a "freed" state *before* we delete the related region here.
4279 This has to be done here so that the sm-handling can use the fact
4280 that they point to the same region to establish that they are equal
4281 (in region_model::eval_condition_without_cm), and thus transition
4282 all pointers to the region to the "freed" state together, regardless
4284 if (tree callee_fndecl
= get_fndecl_for_call (call
, ctxt
))
4285 if (is_named_call_p (callee_fndecl
, "free", call
, 1))
/* Evaluate the pointer argument being freed.  */
4287 tree ptr
= gimple_call_arg (call
, 0);
4288 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
4289 svalue
*ptr_sval
= get_svalue (ptr_sid
);
4290 if (region_svalue
*ptr_to_region_sval
4291 = ptr_sval
->dyn_cast_region_svalue ())
4293 /* If the ptr points to an underlying heap region, delete it,
4294 poisoning pointers. */
4295 region_id pointee_rid
= ptr_to_region_sval
->get_pointee ();
4296 region_id heap_rid
= get_root_region ()->ensure_heap_region (this);
4297 if (!pointee_rid
.null_p ()
4298 && get_region (pointee_rid
)->get_parent () == heap_rid
)
4301 delete_region_and_descendents (pointee_rid
,
4303 &stats
, ctxt
->get_logger ());
4304 purge_unused_svalues (&stats
, ctxt
);
4306 // TODO: do anything with stats?
/* Conservatively wipe everything reachable if the call wasn't
   recognized in on_call_pre.  */
4312 if (unknown_side_effects
)
4313 handle_unrecognized_call (call
, ctxt
);
4316 /* Helper class for region_model::handle_unrecognized_call, for keeping
4317 track of all regions that are reachable, and, of those, which are
/* NOTE(review): fragmentary capture - access specifiers, braces and the
   closing of the class are not visible here.  */
4320 class reachable_regions
/* Constructor: record the model and start with empty bitmaps.  */
4323 reachable_regions (region_model
*model
)
4324 : m_model (model
), m_reachable_rids (), m_mutable_rids ()
4327 /* Lazily mark RID as being reachable, recursively adding regions
4328 reachable from RID. */
4329 void add (region_id rid
, bool is_mutable
)
4331 gcc_assert (!rid
.null_p ());
4333 unsigned idx
= rid
.as_int ();
4334 /* Bail out if this region is already in the sets at the IS_MUTABLE
4335 level of mutability. */
4336 if (!is_mutable
&& bitmap_bit_p (m_reachable_rids
, idx
))
4338 bitmap_set_bit (m_reachable_rids
, idx
);
/* If marking as mutable, also bail out if already marked mutable.  */
4342 if (bitmap_bit_p (m_mutable_rids
, idx
))
4345 bitmap_set_bit (m_mutable_rids
, idx
);
4348 /* If this region's value is a pointer, add the pointee. */
4349 region
*reg
= m_model
->get_region (rid
);
4350 svalue_id sid
= reg
->get_value_direct ();
4351 svalue
*sval
= m_model
->get_svalue (sid
);
4353 if (region_svalue
*ptr
= sval
->dyn_cast_region_svalue ())
4355 region_id pointee_rid
= ptr
->get_pointee ();
4356 /* Use const-ness of pointer type to affect mutability. */
4357 bool ptr_is_mutable
= true;
4358 if (ptr
->get_type ()
4359 && TREE_CODE (ptr
->get_type ()) == POINTER_TYPE
4360 && TYPE_READONLY (TREE_TYPE (ptr
->get_type ())))
4361 ptr_is_mutable
= false;
4362 add (pointee_rid
, ptr_is_mutable
);
4365 /* Add descendents of this region. */
4366 region_id_set
descendents (m_model
);
4367 m_model
->get_descendents (rid
, &descendents
, region_id::null ());
4368 for (unsigned i
= 0; i
< m_model
->get_num_regions (); i
++)
4370 region_id iter_rid
= region_id::from_int (i
);
4371 if (descendents
.region_p (iter_rid
))
4372 add (iter_rid
, is_mutable
);
/* Query: was RID marked as mutable (reachable via non-const access)?  */
4376 bool mutable_p (region_id rid
)
4378 gcc_assert (!rid
.null_p ());
4379 return bitmap_bit_p (m_mutable_rids
, rid
.as_int ());
/* The model the region ids refer into (not owned).  */
4383 region_model
*m_model
;
4385 /* The region ids already seen. This has to be an auto_bitmap rather than
4386 an auto_sbitmap as new regions can be created within the model during
4388 auto_bitmap m_reachable_rids
;
4390 /* The region_ids that can be changed (accessed via non-const pointers). */
4391 auto_bitmap m_mutable_rids
;
4394 /* Handle a call CALL to a function with unknown behavior.
4396 Traverse the regions in this model, determining what regions are
4397 reachable from pointer arguments to CALL and from global variables,
4400 Set all reachable regions to new unknown values and purge sm-state
4401 from their values, and from values that point to them. */
/* NOTE(review): fragmentary capture - braces and some guard lines are
   not visible here.  */
4404 region_model::handle_unrecognized_call (const gcall
*call
,
4405 region_model_context
*ctxt
)
4407 tree fndecl
= get_fndecl_for_call (call
, ctxt
);
4409 reachable_regions
reachable_regions (this);
4411 /* Determine the reachable regions and their mutability. */
/* Globals are always reachable and treated as mutable.  */
4414 region_id globals_rid
= get_globals_region_id ();
4415 if (!globals_rid
.null_p ())
4416 reachable_regions
.add (globals_rid
, true);
4418 /* Params that are pointers. */
4419 tree iter_param_types
= NULL_TREE
;
4421 iter_param_types
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
4422 for (unsigned arg_idx
= 0; arg_idx
< gimple_call_num_args (call
); arg_idx
++)
4424 /* Track expected param type, where available. */
4425 tree param_type
= NULL_TREE
;
4426 if (iter_param_types
)
4428 param_type
= TREE_VALUE (iter_param_types
);
4429 gcc_assert (param_type
);
4430 iter_param_types
= TREE_CHAIN (iter_param_types
);
4433 tree parm
= gimple_call_arg (call
, arg_idx
);
4434 svalue_id parm_sid
= get_rvalue (parm
, NULL
);
4435 svalue
*parm_sval
= get_svalue (parm_sid
);
4437 if (region_svalue
*parm_ptr
= parm_sval
->dyn_cast_region_svalue ())
4439 region_id pointee_rid
= parm_ptr
->get_pointee ();
/* Pointer-to-const params make the pointee reachable but immutable.  */
4440 bool is_mutable
= true;
4442 && TREE_CODE (param_type
) == POINTER_TYPE
4443 && TYPE_READONLY (TREE_TYPE (param_type
)))
4445 reachable_regions
.add (pointee_rid
, is_mutable
);
4447 // FIXME: what about compound parms that contain ptrs?
4451 /* OK: we now have all reachable regions.
4452 Set them all to new unknown values. */
4453 for (unsigned i
= 0; i
< get_num_regions (); i
++)
4455 region_id iter_rid
= region_id::from_int (i
);
4456 if (reachable_regions
.mutable_p (iter_rid
))
4458 region
*reg
= get_region (iter_rid
);
4460 /* Purge any sm-state for any underlying svalue. */
4461 svalue_id curr_sid
= reg
->get_value_direct ();
4462 if (!curr_sid
.null_p ())
4463 ctxt
->on_unknown_change (curr_sid
);
/* Clobber the region's value: the call may have written to it.  */
4465 set_to_new_unknown_value (iter_rid
,
4471 /* Purge sm-state for any remaining svalues that point to regions that
4472 were reachable. This helps suppress leak false-positives.
4474 For example, if we had a malloc call that was cast to a "foo *" type,
4475 we could have a temporary void * for the result of malloc which has its
4476 own svalue, not reachable from the function call, but for which the
4477 "foo *" svalue was reachable. If we don't purge it, the temporary will
4478 be reported as a leak. */
4481 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
4482 if (region_svalue
*ptr
= svalue
->dyn_cast_region_svalue ())
4484 region_id pointee_rid
= ptr
->get_pointee ();
4485 if (reachable_regions
.mutable_p (pointee_rid
))
4486 ctxt
->on_unknown_change (svalue_id::from_int (i
));
4492 /* Update this model for the RETURN_STMT, using CTXT to report any
/* NOTE(review): fragmentary capture; the guard between original lines
   4500 and 4503 is not visible here - presumably a null-check on
   lhs/rhs before the assignment; confirm against the full file.  */
4496 region_model::on_return (const greturn
*return_stmt
, region_model_context
*ctxt
)
/* The function's DECL_RESULT models the returned value's storage.  */
4498 tree callee
= get_current_function ()->decl
;
4499 tree lhs
= DECL_RESULT (callee
);
4500 tree rhs
= gimple_return_retval (return_stmt
);
/* Copy the returned rvalue into the result lvalue.  */
4503 set_value (get_lvalue (lhs
, ctxt
), get_rvalue (rhs
, ctxt
), ctxt
);
4506 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
4507 ENODE, using CTXT to report any diagnostics.
4509 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
4510 0), as opposed to any second return due to longjmp/sigsetjmp. */
/* NOTE(review): fragmentary capture - braces are not visible here.  */
4513 region_model::on_setjmp (const gcall
*call
, const exploded_node
*enode
,
4514 region_model_context
*ctxt
)
/* Locate the region for the jmp_buf argument.  */
4516 region_id buf_rid
= deref_rvalue (gimple_call_arg (call
, 0), ctxt
);
4517 region
*buf
= get_region (buf_rid
);
4519 /* Create a setjmp_svalue for this call and store it in BUF_RID's region. */
4522 setjmp_record
r (enode
, call
);
4523 svalue
*sval
= new setjmp_svalue (r
, buf
->get_type ());
4524 svalue_id new_sid
= add_svalue (sval
);
4525 set_value (buf_rid
, new_sid
, ctxt
);
4528 /* Direct calls to setjmp return 0. */
4529 if (tree lhs
= gimple_call_lhs (call
))
4531 tree zero
= build_int_cst (TREE_TYPE (lhs
), 0);
4532 svalue_id new_sid
= get_or_create_constant_svalue (zero
);
4533 region_id lhs_rid
= get_lvalue (lhs
, ctxt
);
4534 set_value (lhs_rid
, new_sid
, ctxt
);
4538 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
4539 to a "setjmp" at SETJMP_CALL where the final stack depth should be
4540 SETJMP_STACK_DEPTH. Purge any stack frames, potentially reporting on
/* NOTE(review): fragmentary capture - braces and the "else" structure
   around original line 4579 are not visible here.  */
4544 region_model::on_longjmp (const gcall
*longjmp_call
, const gcall
*setjmp_call
,
4545 int setjmp_stack_depth
,
4546 region_model_context
*ctxt
)
4548 /* Evaluate the val, using the frame of the "longjmp". */
4549 tree fake_retval
= gimple_call_arg (longjmp_call
, 1);
4550 svalue_id fake_retval_sid
= get_rvalue (fake_retval
, ctxt
);
4552 /* Pop any frames until we reach the stack depth of the function where
4553 setjmp was called. */
4554 gcc_assert (get_stack_depth () >= setjmp_stack_depth
);
4555 while (get_stack_depth () > setjmp_stack_depth
)
4557 /* Don't purge unused svalues yet, as we're using fake_retval_sid. */
4558 pop_frame (false, NULL
, ctxt
);
4561 gcc_assert (get_stack_depth () == setjmp_stack_depth
);
4563 /* Assign to LHS of "setjmp" in new_state. */
4564 if (tree lhs
= gimple_call_lhs (setjmp_call
))
4566 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
4567 tree t_zero
= build_int_cst (TREE_TYPE (fake_retval
), 0);
4568 svalue_id zero_sid
= get_or_create_constant_svalue (t_zero
);
4569 tristate eq_zero
= eval_condition (fake_retval_sid
, EQ_EXPR
, zero_sid
);
4570 /* If we have 0, use 1. */
4571 if (eq_zero
.is_true ())
4573 tree t_one
= build_int_cst (TREE_TYPE (fake_retval
), 1);
4574 svalue_id one_sid
= get_or_create_constant_svalue (t_one
);
4575 fake_retval_sid
= one_sid
;
4579 /* Otherwise note that the value is nonzero. */
4580 m_constraints
->add_constraint (fake_retval_sid
, NE_EXPR
, zero_sid
);
/* Write the (possibly adjusted) longjmp val into setjmp's LHS.  */
4583 region_id lhs_rid
= get_lvalue (lhs
, ctxt
);
4584 set_value (lhs_rid
, fake_retval_sid
, ctxt
);
4587 /* Now that we've assigned the fake_retval, we can purge the unused
4588 svalues, which could detect leaks. */
4589 purge_unused_svalues (NULL
, ctxt
, NULL
);
4593 /* Update this region_model for a phi stmt of the form
4594 LHS = PHI <...RHS...>.
4595 where RHS is for the appropriate edge. */
/* NOTE(review): fragmentary capture - braces, a "return" under the
   virtual-operand check, and the "else" before line 4623 are not
   visible here.  */
4598 region_model::handle_phi (const gphi
*phi
,
4599 tree lhs
, tree rhs
, bool is_back_edge
,
4600 region_model_context
*ctxt
)
4602 /* For now, don't bother tracking the .MEM SSA names. */
4603 if (tree var
= SSA_NAME_VAR (lhs
))
4604 if (TREE_CODE (var
) == VAR_DECL
)
4605 if (VAR_DECL_IS_VIRTUAL_OPERAND (var
))
/* Evaluate the incoming value for the edge being taken.  */
4608 svalue_id rhs_sid
= get_rvalue (rhs
, ctxt
);
4610 if (is_back_edge
&& get_svalue (rhs_sid
)->get_kind () != SK_UNKNOWN
)
4612 /* If we have a back edge, we probably have a loop.
4613 Use an unknown value, to avoid effectively unrolling the
4615 To terminate, we need to avoid generating a series of
4616 models with an unbounded monotonically increasing number of
4617 redundant unknown values; hence we need to purge svalues
4618 before inserting the state into the exploded graph, to
4619 collect unused svalues. */
4620 set_to_new_unknown_value (get_lvalue (lhs
, ctxt
), TREE_TYPE (lhs
), ctxt
);
/* Forward-edge case: copy the incoming value to the phi's LHS.  */
4623 set_value (get_lvalue (lhs
, ctxt
), rhs_sid
, ctxt
);
/* Let the context (e.g. sm-state machinery) react to the phi.  */
4626 ctxt
->on_phi (phi
, rhs
);
4629 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
4631 Get the id of the region for PV within this region_model,
4632 emitting any diagnostics to CTXT. */
/* NOTE(review): fragmentary capture - the switch's case labels
   (e.g. ARRAY_REF, MEM_REF, COMPONENT_REF), braces, and several
   returns are not visible here; the order of the visible handlers
   below implies which tree code each one services.  */
4635 region_model::get_lvalue_1 (path_var pv
, region_model_context
*ctxt
)
4637 tree expr
= pv
.m_tree
;
/* Dispatch on the kind of tree expression.  */
4641 switch (TREE_CODE (expr
))
4645 /* If we see a tree code we we don't know how to handle, rather than
4646 ICE or generate bogus results, create a dummy region, and notify
4647 CTXT so that it can mark the new state as being not properly
4648 modelled. The exploded graph can then stop exploring that path,
4649 since any diagnostics we might issue will have questionable
4652 = add_region (new symbolic_region (m_root_rid
, NULL_TREE
, false));
4653 ctxt
->on_unknown_tree_code (pv
, dump_location_t ());
/* Array indexing: get the element region within the array region.  */
4660 tree array
= TREE_OPERAND (expr
, 0);
4661 tree index
= TREE_OPERAND (expr
, 1);
4663 // TODO: operands 2 and 3, if present:
4664 gcc_assert (TREE_OPERAND (expr
, 2) == NULL_TREE
);
4665 gcc_assert (TREE_OPERAND (expr
, 3) == NULL_TREE
);
4668 region_id array_rid
= get_lvalue (array
, ctxt
);
4669 svalue_id index_sid
= get_rvalue (index
, ctxt
);
4670 array_region
*array_reg
= get_region
<array_region
> (array_rid
);
4671 return array_reg
->get_element (this, array_rid
, index_sid
, ctxt
);
4677 /* For now, create a view, as if a cast, ignoring the bit positions. */
4678 tree obj
= TREE_OPERAND (expr
, 0);
4679 return get_or_create_view (get_lvalue (obj
, ctxt
), TREE_TYPE (expr
));
/* Memory reference: pointer plus offset.  */
4685 tree ptr
= TREE_OPERAND (expr
, 0);
4686 tree offset
= TREE_OPERAND (expr
, 1);
4687 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
4688 svalue_id offset_sid
= get_rvalue (offset
, ctxt
);
4689 return get_or_create_mem_ref (TREE_TYPE (expr
), ptr_sid
,
4695 /* Handle globals. */
4696 if (is_global_var (expr
))
4698 region_id globals_rid
4699 = get_root_region ()->ensure_globals_region (this);
4700 map_region
*globals
= get_region
<map_region
> (globals_rid
);
4701 region_id var_rid
= globals
->get_or_create (this, globals_rid
, expr
,
/* Locals/SSA names: look up within the frame at PV's stack depth.  */
4712 gcc_assert (TREE_CODE (expr
) == SSA_NAME
4713 || TREE_CODE (expr
) == PARM_DECL
4714 || TREE_CODE (expr
) == VAR_DECL
4715 || TREE_CODE (expr
) == RESULT_DECL
);
4717 int stack_depth
= pv
.m_stack_depth
;
4718 stack_region
*stack
= get_root_region ()->get_stack_region (this);
4720 region_id frame_rid
= stack
->get_frame_rid (stack_depth
);
4721 frame_region
*frame
= get_region
<frame_region
> (frame_rid
);
4723 region_id child_rid
= frame
->get_or_create (this, frame_rid
, expr
,
/* Field access: view the object as its struct/union type, then get
   the field's region within that view.  */
4731 tree obj
= TREE_OPERAND (expr
, 0);
4732 tree field
= TREE_OPERAND (expr
, 1);
4733 region_id obj_rid
= get_lvalue (obj
, ctxt
);
4734 region_id struct_or_union_rid
4735 = get_or_create_view (obj_rid
, TREE_TYPE (obj
));
4736 return get_field_region (struct_or_union_rid
, field
);
/* A decl with a constant initializer: materialize a region for it.  */
4742 tree cst_type
= TREE_TYPE (expr
);
4743 region_id cst_rid
= add_region_for_type (m_root_rid
, cst_type
);
4744 if (tree value
= DECL_INITIAL (expr
))
4746 svalue_id sid
= get_rvalue (value
, ctxt
);
4747 get_region (cst_rid
)->set_value (*this, cst_rid
, sid
, ctxt
);
/* String constant: an array region holding the constant svalue.  */
4755 tree cst_type
= TREE_TYPE (expr
);
4756 array_region
*cst_region
= new array_region (m_root_rid
, cst_type
);
4757 region_id cst_rid
= add_region (cst_region
);
4758 svalue_id cst_sid
= get_or_create_constant_svalue (expr
);
4759 cst_region
->set_value (*this, cst_rid
, cst_sid
, ctxt
);
4764 case VIEW_CONVERT_EXPR
:
4766 tree obj
= TREE_OPERAND (expr
, 0);
4767 return get_or_create_view (get_lvalue (obj
, ctxt
), TREE_TYPE (expr
));
4773 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
4776 assert_compat_types (tree src_type
, tree dst_type
)
4778 if (src_type
&& dst_type
&& !VOID_TYPE_P (dst_type
))
4779 gcc_checking_assert (useless_type_conversion_p (src_type
, dst_type
));
4782 /* Get the id of the region for PV within this region_model,
4783 emitting any diagnostics to CTXT. */
4786 region_model::get_lvalue (path_var pv
, region_model_context
*ctxt
)
4788 if (pv
.m_tree
== NULL_TREE
)
4789 return region_id::null ();
4791 region_id result_rid
= get_lvalue_1 (pv
, ctxt
);
4792 assert_compat_types (get_region (result_rid
)->get_type (),
4793 TREE_TYPE (pv
.m_tree
));
4797 /* Get the region_id for EXPR within this region_model (assuming the most
4798 recent stack frame if it's a local). */
4801 region_model::get_lvalue (tree expr
, region_model_context
*ctxt
)
4803 return get_lvalue (path_var (expr
, get_stack_depth () - 1), ctxt
);
4806 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
4808 Get the value of PV within this region_model,
4809 emitting any diagnostics to CTXT. */
/* NOTE(review): fragmentary capture - the switch's case labels and
   braces are not visible here.  */
4812 region_model::get_rvalue_1 (path_var pv
, region_model_context
*ctxt
)
4814 gcc_assert (pv
.m_tree
);
/* Dispatch on the kind of tree expression.  */
4816 switch (TREE_CODE (pv
.m_tree
))
/* Unhandled codes: fall back to a fresh unknown value of the right type.  */
4820 svalue
*unknown_sval
= new unknown_svalue (TREE_TYPE (pv
.m_tree
));
4821 return add_svalue (unknown_sval
);
/* Address-of: special-case function and label decls, otherwise take
   a pointer to the operand's region.  */
4828 tree expr
= pv
.m_tree
;
4829 tree op0
= TREE_OPERAND (expr
, 0);
4830 if (TREE_CODE (op0
) == FUNCTION_DECL
)
4831 return get_svalue_for_fndecl (TREE_TYPE (expr
), op0
);
4832 else if (TREE_CODE (op0
) == LABEL_DECL
)
4833 return get_svalue_for_label (TREE_TYPE (expr
), op0
);
4834 region_id expr_rid
= get_lvalue (op0
, ctxt
);
4835 return get_or_create_ptr_svalue (TREE_TYPE (expr
), expr_rid
);
/* Aggregate element accesses: read the value held by the lvalue's region.  */
4841 region_id element_rid
= get_lvalue (pv
, ctxt
);
4842 return get_region (element_rid
)->get_value (*this, true, ctxt
);
/* Constants map directly to (shared) constant svalues.  */
4848 return get_or_create_constant_svalue (pv
.m_tree
);
/* Variables/SSA names: read the value held by the variable's region.  */
4857 region_id var_rid
= get_lvalue (pv
, ctxt
);
4858 return get_region (var_rid
)->get_value (*this, true, ctxt
);
4863 /* Get the value of PV within this region_model,
4864 emitting any diagnostics to CTXT. */
4867 region_model::get_rvalue (path_var pv
, region_model_context
*ctxt
)
4869 if (pv
.m_tree
== NULL_TREE
)
4870 return svalue_id::null ();
4871 svalue_id result_sid
= get_rvalue_1 (pv
, ctxt
);
4873 assert_compat_types (get_svalue (result_sid
)->get_type (),
4874 TREE_TYPE (pv
.m_tree
));
4879 /* Get the value of EXPR within this region_model (assuming the most
4880 recent stack frame if it's a local). */
4883 region_model::get_rvalue (tree expr
, region_model_context
*ctxt
)
4885 return get_rvalue (path_var (expr
, get_stack_depth () - 1), ctxt
);
4888 /* Return an svalue_id for a pointer to RID of type PTR_TYPE, reusing
4889 existing pointer values if one is available. */
4892 region_model::get_or_create_ptr_svalue (tree ptr_type
, region_id rid
)
4894 /* Reuse existing region_svalue, if one of the right type is
4896 /* In theory we could stash a svalue_id in "region", but differing
4897 pointer types muddles things.
4898 For now, just do a linear search through all existing svalues. */
4901 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
4902 if (region_svalue
*ptr_svalue
= svalue
->dyn_cast_region_svalue ())
4903 if (ptr_svalue
->get_pointee () == rid
4904 && ptr_svalue
->get_type () == ptr_type
)
4905 return svalue_id::from_int (i
);
4907 return add_svalue (new region_svalue (ptr_type
, rid
));
4910 /* Return an svalue_id for a constant_svalue for CST_EXPR,
4911 creating the constant_svalue if necessary.
4912 The constant_svalue instances are reused, based on pointer equality
4916 region_model::get_or_create_constant_svalue (tree cst_expr
)
4918 gcc_assert (cst_expr
);
4920 /* Reuse one if it already exists. */
4921 // TODO: maybe store a map, rather than do linear search?
4924 FOR_EACH_VEC_ELT (m_svalues
, i
, svalue
)
4925 if (svalue
->maybe_get_constant () == cst_expr
)
4926 return svalue_id::from_int (i
);
4928 svalue_id cst_sid
= add_svalue (new constant_svalue (cst_expr
));
4932 /* Return an svalue_id for a region_svalue for FNDECL,
4933 creating the function_region if necessary. */
4936 region_model::get_svalue_for_fndecl (tree ptr_type
, tree fndecl
)
4938 gcc_assert (TREE_CODE (fndecl
) == FUNCTION_DECL
);
4939 region_id function_rid
= get_region_for_fndecl (fndecl
);
4940 return get_or_create_ptr_svalue (ptr_type
, function_rid
);
4943 /* Return a region_id for a function_region for FNDECL,
4944 creating it if necessary. */
4947 region_model::get_region_for_fndecl (tree fndecl
)
4949 gcc_assert (TREE_CODE (fndecl
) == FUNCTION_DECL
);
4951 region_id code_rid
= get_root_region ()->ensure_code_region (this);
4952 code_region
*code
= get_root_region ()->get_code_region (this);
4954 return code
->get_or_create (this, code_rid
, fndecl
, TREE_TYPE (fndecl
));
4957 /* Return an svalue_id for a region_svalue for LABEL,
4958 creating the label_region if necessary. */
4961 region_model::get_svalue_for_label (tree ptr_type
, tree label
)
4963 gcc_assert (TREE_CODE (label
) == LABEL_DECL
);
4964 region_id label_rid
= get_region_for_label (label
);
4965 return get_or_create_ptr_svalue (ptr_type
, label_rid
);
4968 /* Return a region_id for a label_region for LABEL,
4969 creating it if necessary. */
4972 region_model::get_region_for_label (tree label
)
4974 gcc_assert (TREE_CODE (label
) == LABEL_DECL
);
4976 tree fndecl
= DECL_CONTEXT (label
);
4977 gcc_assert (fndecl
&& TREE_CODE (fndecl
) == FUNCTION_DECL
);
4979 region_id func_rid
= get_region_for_fndecl (fndecl
);
4980 function_region
*func_reg
= get_region
<function_region
> (func_rid
);
4981 return func_reg
->get_or_create (this, func_rid
, label
, TREE_TYPE (label
));
4984 /* Build a cast of SRC_EXPR to DST_TYPE, or return NULL_TREE.
4986 Adapted from gcc::jit::playback::context::build_cast, which in turn is
4988 - c/c-typeck.c:build_c_cast
4989 - c/c-convert.c: convert
4991 Only some kinds of cast are currently supported here. */
/* NOTE(review): fragmentary capture - the switch's case labels
   (presumably keyed off dst_code), braces and returns are not
   visible here; the visible handlers below cover integer, boolean,
   real and pointer destinations in that order.  */
4994 build_cast (tree dst_type
, tree src_expr
)
/* First, let the target try the conversion itself.  */
4996 tree result
= targetm
.convert_to_type (dst_type
, src_expr
);
/* Otherwise, dispatch on the destination type's code.  */
4999 enum tree_code dst_code
= TREE_CODE (dst_type
);
/* Integral destination.  */
5004 result
= convert_to_integer (dst_type
, src_expr
);
5008 /* Compare with c_objc_common_truthvalue_conversion and
5009 c_common_truthvalue_conversion. */
5010 /* For now, convert to: (src_expr != 0) */
5011 result
= build2 (NE_EXPR
, dst_type
,
5013 build_int_cst (TREE_TYPE (src_expr
), 0));
/* Floating-point destination.  */
5017 result
= convert_to_real (dst_type
, src_expr
);
/* Pointer destination: a no-op conversion.  */
5021 result
= build1 (NOP_EXPR
, dst_type
, src_expr
);
/* Fold the built expression where safe.  */
5028 if (TREE_CODE (result
) != C_MAYBE_CONST_EXPR
)
5029 result
= fold (result
);
5034 /* If the type of SID's underlying value is DST_TYPE, return SID.
5035 Otherwise, attempt to create (or reuse) an svalue representing an access
5036 of SID as a DST_TYPE and return that value's svalue_id. */
/* NOTE(review): fragmentary capture - braces, some declarations and a
   few returns (e.g. returning SID when types match, returning the
   equivalent sid found in the EC) are not visible here.  */
5039 region_model::maybe_cast_1 (tree dst_type
, svalue_id sid
)
5041 svalue
*sval
= get_svalue (sid
);
5042 tree src_type
= sval
->get_type ();
/* No cast needed when the types already agree.  */
5043 if (src_type
== dst_type
)
/* Pointer-involving casts.  */
5046 if (POINTER_TYPE_P (dst_type
)
5047 || POINTER_TYPE_P (src_type
))
5049 /* Pointer to region. */
5050 if (region_svalue
*ptr_sval
= sval
->dyn_cast_region_svalue ())
5051 return get_or_create_ptr_svalue (dst_type
, ptr_sval
->get_pointee ());
5053 /* Unknown pointer? Get or create a new unknown pointer of the
5054 correct type, preserving the equality between the pointers. */
5055 if (sval
->dyn_cast_unknown_svalue ())
5057 equiv_class
&ec
= m_constraints
->get_equiv_class (sid
);
5059 /* Look for an existing pointer of the correct type within the EC. */
5061 svalue_id
*equiv_sid
;
5062 FOR_EACH_VEC_ELT (ec
.m_vars
, i
, equiv_sid
)
5064 svalue
*equiv_val
= get_svalue (*equiv_sid
);
5065 if (equiv_val
->get_type () == dst_type
)
5069 /* Otherwise, create a new unknown pointer of the correct type. */
5070 svalue
*unknown_sval
= new unknown_svalue (dst_type
);
5071 svalue_id new_ptr_sid
= add_svalue (unknown_sval
);
/* Record that the new pointer equals the old one, so later
   comparisons between the two succeed.  */
5072 m_constraints
->add_constraint (sid
, EQ_EXPR
, new_ptr_sid
);
5077 /* Attempt to cast constants. */
5078 if (tree src_cst
= sval
->maybe_get_constant ())
5080 tree dst
= build_cast (dst_type
, src_cst
);
5081 gcc_assert (dst
!= NULL_TREE
);
5082 if (CONSTANT_CLASS_P (dst
))
5083 return get_or_create_constant_svalue (dst
);
5086 /* Otherwise, return a new unknown value. */
5087 svalue
*unknown_sval
= new unknown_svalue (dst_type
);
5088 return add_svalue (unknown_sval
);
5091 /* If the type of SID's underlying value is DST_TYPE, return SID.
5092 Otherwise, attempt to create (or reuse) an svalue representing an access
5093 of SID as a DST_TYPE and return that value's svalue_id.
5095 If the result != SID, then call CTXT's on_cast vfunc (if CTXT is non-NULL),
5096 so that sm-state can be propagated from SID to the result. */
/* NOTE(review): fragmentary capture - the guards between original
   lines 5102 and 5107 (per the header comment: result != sid and a
   null-check on ctxt) and the final return are not visible here.  */
5099 region_model::maybe_cast (tree dst_type
, svalue_id sid
,
5100 region_model_context
*ctxt
)
5102 svalue_id result
= maybe_cast_1 (dst_type
, sid
);
5106 /* Notify ctxt about a cast, so any sm-state can be copied. */
5107 ctxt
->on_cast (sid
, result
);
5112 /* Ensure that the region for OBJ_RID has a child region for FIELD;
5113 return the child region's region_id. */
/* NOTE(review): fragmentary capture - braces, returns, and some
   lines in the union branch are not visible here.  */
5116 region_model::get_field_region (region_id struct_or_union_rid
, tree field
)
5118 struct_or_union_region
*sou_reg
5119 = get_region
<struct_or_union_region
> (struct_or_union_rid
);
5121 /* Inherit constness from parent type. */
5122 const int qual_mask
= TYPE_QUAL_CONST
;
5123 int sou_quals
= TYPE_QUALS (sou_reg
->get_type ()) & qual_mask
;
5124 tree field_type
= TREE_TYPE (field
);
5125 tree field_type_with_quals
= build_qualified_type (field_type
, sou_quals
);
5127 // TODO: maybe convert to a vfunc?
/* Unions: model the field access as a view of the whole union.  */
5128 if (sou_reg
->get_kind () == RK_UNION
)
5131 Get a view of the union as a whole, with the type of the field. */
5133 = get_or_create_view (struct_or_union_rid
, field_type_with_quals
);
/* Structs: get or create the field's child region directly.  */
5140 = sou_reg
->get_or_create (this, struct_or_union_rid
, field
,
5141 field_type_with_quals
);
5146 /* Get a region_id for referencing PTR_SID, creating a region if need be, and
5147 potentially generating warnings via CTXT. */
/* NOTE(review): fragmentary capture - the switch's case labels (by the
   visible handlers: a region_svalue case, an unknown case, a poisoned
   case), braces, the new_rid/ptr_val declarations and the final return
   of the new region are not visible here.  */
5150 region_model::deref_rvalue (svalue_id ptr_sid
, region_model_context
*ctxt
)
5152 gcc_assert (!ptr_sid
.null_p ());
5153 svalue
*ptr_svalue
= get_svalue (ptr_sid
);
5154 gcc_assert (ptr_svalue
);
/* Dispatch on the kind of value held by the pointer.  */
5156 switch (ptr_svalue
->get_kind ())
/* A pointer-to-region: simply dereference to the pointee.  */
5160 region_svalue
*region_sval
= as_a
<region_svalue
*> (ptr_svalue
);
5161 return region_sval
->get_pointee ();
/* An unknown pointer: fall through to minting a symbolic region.  */
5165 goto create_symbolic_region
;
/* A poisoned pointer: warn (when a representative tree exists), then
   treat like an unknown pointer.  */
5170 if (tree ptr
= get_representative_tree (ptr_sid
))
5172 poisoned_svalue
*poisoned_sval
5173 = as_a
<poisoned_svalue
*> (ptr_svalue
);
5174 enum poison_kind pkind
= poisoned_sval
->get_poison_kind ();
5175 ctxt
->warn (new poisoned_value_diagnostic (ptr
, pkind
));
5177 goto create_symbolic_region
;
5182 create_symbolic_region
:
5183 /* We need a symbolic_region to represent this unknown region.
5184 We don't know if it on the heap, stack, or a global,
5185 so use the root region as parent. */
5187 = add_region (new symbolic_region (m_root_rid
, NULL_TREE
, false));
5189 /* We need to write the region back into the pointer,
5190 or we'll get a new, different region each time.
5191 We do this by changing the meaning of ptr_sid, replacing
5192 the unknown value with the ptr to the new region.
5193 We replace the meaning of the ID rather than simply writing
5194 to PTR's lvalue since there could be several places sharing
5195 the same unknown ptr value. */
5197 = new region_svalue (ptr_svalue
->get_type (), new_rid
);
5198 replace_svalue (ptr_sid
, ptr_val
);
5204 goto create_symbolic_region
;
5210 /* Get a region_id for referencing PTR, creating a region if need be, and
5211 potentially generating warnings via CTXT. */
5214 region_model::deref_rvalue (tree ptr
, region_model_context
*ctxt
)
5216 svalue_id ptr_sid
= get_rvalue (ptr
, ctxt
);
5217 return deref_rvalue (ptr_sid
, ctxt
);
5220 /* Set the value of the region given by LHS_RID to the value given
5224 region_model::set_value (region_id lhs_rid
, svalue_id rhs_sid
,
5225 region_model_context
*ctxt
)
5227 gcc_assert (!lhs_rid
.null_p ());
5228 gcc_assert (!rhs_sid
.null_p ());
5229 get_region (lhs_rid
)->set_value (*this, lhs_rid
, rhs_sid
, ctxt
);
5232 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
/* NOTE(review): fragmentary capture - the "op" parameter line, braces,
   and the early-return when ts.is_known() (between original lines 5249
   and 5254) are not visible here.  */
5236 region_model::eval_condition (svalue_id lhs_sid
,
5238 svalue_id rhs_sid
) const
5240 svalue
*lhs
= get_svalue (lhs_sid
);
5241 svalue
*rhs
= get_svalue (rhs_sid
);
5243 /* For now, make no attempt to capture constraints on floating-point
5245 if ((lhs
->get_type () && FLOAT_TYPE_P (lhs
->get_type ()))
5246 || (rhs
->get_type () && FLOAT_TYPE_P (rhs
->get_type ())))
5247 return tristate::unknown ();
/* First try the model's own value-based knowledge.  */
5249 tristate ts
= eval_condition_without_cm (lhs_sid
, op
, rhs_sid
);
5254 /* Otherwise, try constraints. */
5255 return m_constraints
->eval_condition (lhs_sid
, op
, rhs_sid
);
5258 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5259 this model, without resorting to the constraint_manager.
5261 This is exposed so that impl_region_model_context::on_state_leak can
5262 check for equality part-way through region_model::purge_unused_svalues
5263 without risking creating new ECs. */
/* NOTE(review): fragmentary capture - the "op" parameter line, braces,
   the same-svalue guard and the switch on OP that yields the visible
   TS_TRUE/TS_FALSE returns are not visible here.  */
5266 region_model::eval_condition_without_cm (svalue_id lhs_sid
,
5268 svalue_id rhs_sid
) const
5270 svalue
*lhs
= get_svalue (lhs_sid
);
5271 svalue
*rhs
= get_svalue (rhs_sid
);
5275 /* See what we know based on the values. */
5278 /* For now, make no attempt to capture constraints on floating-point
5280 if ((lhs
->get_type () && FLOAT_TYPE_P (lhs
->get_type ()))
5281 || (rhs
->get_type () && FLOAT_TYPE_P (rhs
->get_type ())))
5282 return tristate::unknown ();
5286 /* If we have the same svalue, then we have equality
5287 (apart from NaN-handling).
5288 TODO: should this definitely be the case for poisoned values? */
/* Reflexive ops (e.g. equality-like) are true of identical svalues...  */
5294 return tristate::TS_TRUE
;
/* ...whereas strict-inequality-like ops are false.  */
5299 return tristate::TS_FALSE
;
5302 /* For other ops, use the logic below. */
5307 /* If we have a pair of region_svalues, compare them. */
5308 if (region_svalue
*lhs_ptr
= lhs
->dyn_cast_region_svalue ())
5309 if (region_svalue
*rhs_ptr
= rhs
->dyn_cast_region_svalue ())
5311 tristate res
= region_svalue::eval_condition (lhs_ptr
, op
, rhs_ptr
);
5312 if (res
.is_known ())
5314 /* Otherwise, only known through constraints. */
5317 /* If we have a pair of constants, compare them. */
5318 if (constant_svalue
*cst_lhs
= lhs
->dyn_cast_constant_svalue ())
5319 if (constant_svalue
*cst_rhs
= rhs
->dyn_cast_constant_svalue ())
5320 return constant_svalue::eval_condition (cst_lhs
, op
, cst_rhs
);
5322 /* Handle comparison of a region_svalue against zero. */
5323 if (region_svalue
*ptr
= lhs
->dyn_cast_region_svalue ())
5324 if (constant_svalue
*cst_rhs
= rhs
->dyn_cast_constant_svalue ())
5325 if (zerop (cst_rhs
->get_constant ()))
5327 /* A region_svalue is a non-NULL pointer, except in certain
5328 special cases (see the comment for region::non_null_p. */
5329 region
*pointee
= get_region (ptr
->get_pointee ());
5330 if (pointee
->non_null_p (*this))
/* Non-null pointer compared against zero: equality is false...  */
5340 return tristate::TS_FALSE
;
/* ...and inequality is true.  */
5345 return tristate::TS_TRUE
;
/* Nothing known from the values alone.  */
5351 return tristate::TS_UNKNOWN
;
5354 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5355 If it is consistent with existing constraints, add it, and return true.
5356 Return false if it contradicts existing constraints.
5357 Use CTXT for reporting any diagnostics associated with the accesses. */
5360 region_model::add_constraint (tree lhs
, enum tree_code op
, tree rhs
,
5361 region_model_context
*ctxt
)
5363 /* For now, make no attempt to capture constraints on floating-point
5365 if (FLOAT_TYPE_P (TREE_TYPE (lhs
)) || FLOAT_TYPE_P (TREE_TYPE (rhs
)))
5368 svalue_id lhs_sid
= get_rvalue (lhs
, ctxt
);
5369 svalue_id rhs_sid
= get_rvalue (rhs
, ctxt
);
5371 tristate t_cond
= eval_condition (lhs_sid
, op
, rhs_sid
);
5373 /* If we already have the condition, do nothing. */
5374 if (t_cond
.is_true ())
5377 /* Reject a constraint that would contradict existing knowledge, as
5379 if (t_cond
.is_false ())
5382 /* Store the constraint. */
5383 m_constraints
->add_constraint (lhs_sid
, op
, rhs_sid
);
5385 add_any_constraints_from_ssa_def_stmt (lhs
, op
, rhs
, ctxt
);
5387 /* Notify the context, if any. This exists so that the state machines
5388 in a program_state can be notified about the condition, and so can
5389 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
5390 when synthesizing constraints as above. */
5392 ctxt
->on_condition (lhs
, op
, rhs
);
5397 /* Subroutine of region_model::add_constraint for handling optimized
5398 && and || conditionals.
5400 If we have an SSA_NAME for a boolean compared against 0,
5401 look at anything implied by the def stmt and call add_constraint
5402 for it (which could recurse).
5404 For example, if we have
5408 and add the constraint
5410 then the def stmt for _3 implies that _1 and _2 are both false,
5411 and hence we can add the constraints:
5416 region_model::add_any_constraints_from_ssa_def_stmt (tree lhs
,
5419 region_model_context
*ctxt
)
5421 if (TREE_CODE (lhs
) != SSA_NAME
)
5424 if (rhs
!= boolean_false_node
)
5427 if (op
!= NE_EXPR
&& op
!= EQ_EXPR
)
5431 - "LHS != false" (i.e. LHS is true), or
5432 - "LHS == false" (i.e. LHS is false). */
5433 bool is_true
= op
== NE_EXPR
;
5435 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
5436 gassign
*assign
= dyn_cast
<gassign
*> (def_stmt
);
5440 enum tree_code rhs_code
= gimple_assign_rhs_code (assign
);
5450 /* ...and "LHS == (rhs1 & rhs2) i.e. "(rhs1 & rhs2)" is true
5451 then both rhs1 and rhs2 must be true. */
5452 tree rhs1
= gimple_assign_rhs1 (assign
);
5453 tree rhs2
= gimple_assign_rhs2 (assign
);
5454 add_constraint (rhs1
, NE_EXPR
, boolean_false_node
, ctxt
);
5455 add_constraint (rhs2
, NE_EXPR
, boolean_false_node
, ctxt
);
5464 /* ...and "LHS == (rhs1 | rhs2)
5465 i.e. "(rhs1 | rhs2)" is false
5466 then both rhs1 and rhs2 must be false. */
5467 tree rhs1
= gimple_assign_rhs1 (assign
);
5468 tree rhs2
= gimple_assign_rhs2 (assign
);
5469 add_constraint (rhs1
, EQ_EXPR
, boolean_false_node
, ctxt
);
5470 add_constraint (rhs2
, EQ_EXPR
, boolean_false_node
, ctxt
);
5478 /* ...and "LHS == (rhs1 OP rhs2)"
5479 then rhs1 OP rhs2 must have the same logical value as LHS. */
5480 tree rhs1
= gimple_assign_rhs1 (assign
);
5481 tree rhs2
= gimple_assign_rhs2 (assign
);
5484 = invert_tree_comparison (rhs_code
, false /* honor_nans */);
5485 add_constraint (rhs1
, rhs_code
, rhs2
, ctxt
);
5491 /* Determine what is known about the condition "LHS OP RHS" within
5493 Use CTXT for reporting any diagnostics associated with the accesses. */
5496 region_model::eval_condition (tree lhs
,
5499 region_model_context
*ctxt
)
5501 /* For now, make no attempt to model constraints on floating-point
5503 if (FLOAT_TYPE_P (TREE_TYPE (lhs
)) || FLOAT_TYPE_P (TREE_TYPE (rhs
)))
5504 return tristate::unknown ();
5506 return eval_condition (get_rvalue (lhs
, ctxt
), op
, get_rvalue (rhs
, ctxt
));
5509 /* If SID is a constant value, return the underlying tree constant.
5510 Otherwise, return NULL_TREE. */
5513 region_model::maybe_get_constant (svalue_id sid
) const
5515 gcc_assert (!sid
.null_p ());
5516 svalue
*sval
= get_svalue (sid
);
5517 return sval
->maybe_get_constant ();
5520 /* Create a new child region of the heap (creating the heap region if
5522 Return the region_id of the new child region. */
5525 region_model::add_new_malloc_region ()
5528 = get_root_region ()->ensure_heap_region (this);
5529 return add_region (new symbolic_region (heap_rid
, NULL_TREE
, true));
5532 /* Attempt to return a tree that represents SID, or return NULL_TREE.
5533 Find the first region that stores the value (e.g. a local) and
5534 generate a representative tree for it. */
5537 region_model::get_representative_tree (svalue_id sid
) const
5544 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
5545 if (sid
== region
->get_value_direct ())
5547 path_var pv
= get_representative_path_var (region_id::from_int (i
));
5552 return maybe_get_constant (sid
);
5555 /* Attempt to return a path_var that represents the region, or return
5557 For example, a region for a field of a local would be a path_var
5558 wrapping a COMPONENT_REF. */
5561 region_model::get_representative_path_var (region_id rid
) const
5563 region
*reg
= get_region (rid
);
5564 region
*parent_region
= get_region (reg
->get_parent ());
5565 region_id stack_rid
= get_stack_region_id ();
5566 if (!stack_rid
.null_p ())
5567 if (parent_region
->get_parent () == stack_rid
)
5569 frame_region
*parent_frame
= (frame_region
*)parent_region
;
5570 tree t
= parent_frame
->get_tree_for_child_region (rid
);
5571 return path_var (t
, parent_frame
->get_depth ());
5573 if (reg
->get_parent () == get_globals_region_id ())
5575 map_region
*globals
= get_root_region ()->get_globals_region (this);
5577 return path_var (globals
->get_tree_for_child_region (rid
), -1);
5580 /* Handle e.g. fields of a local by recursing. */
5581 region_id parent_rid
= reg
->get_parent ();
5582 region
*parent_reg
= get_region (parent_rid
);
5585 if (parent_reg
->get_kind () == RK_STRUCT
)
5587 map_region
*parent_map_region
= (map_region
*)parent_reg
;
5588 /* This can fail if we have a view, rather than a field. */
5590 = parent_map_region
->get_tree_for_child_region (rid
))
5592 path_var parent_pv
= get_representative_path_var (parent_rid
);
5593 if (parent_pv
.m_tree
&& TREE_CODE (child_key
) == FIELD_DECL
)
5594 return path_var (build3 (COMPONENT_REF
,
5595 TREE_TYPE (child_key
),
5596 parent_pv
.m_tree
, child_key
,
5598 parent_pv
.m_stack_depth
);
5603 return path_var (NULL_TREE
, 0);
5606 /* Locate all regions that directly have value SID and append representative
5607 path_var instances for them into *OUT. */
5610 region_model::get_path_vars_for_svalue (svalue_id sid
, vec
<path_var
> *out
) const
5614 FOR_EACH_VEC_ELT (m_regions
, i
, region
)
5615 if (sid
== region
->get_value_direct ())
5617 path_var pv
= get_representative_path_var (region_id::from_int (i
));
5619 out
->safe_push (pv
);
5623 /* Set DST_RID value to be a new unknown value of type TYPE. */
5626 region_model::set_to_new_unknown_value (region_id dst_rid
, tree type
,
5627 region_model_context
*ctxt
)
5629 gcc_assert (!dst_rid
.null_p ());
5630 svalue_id new_sid
= add_svalue (new unknown_svalue (type
));
5631 set_value (dst_rid
, new_sid
, ctxt
);
5633 // TODO: presumably purge all child regions too (but do this in set_value?)
5638 /* Update this model for any phis in SNODE, assuming we came from
5639 LAST_CFG_SUPEREDGE. */
5642 region_model::update_for_phis (const supernode
*snode
,
5643 const cfg_superedge
*last_cfg_superedge
,
5644 region_model_context
*ctxt
)
5646 gcc_assert (last_cfg_superedge
);
5648 for (gphi_iterator gpi
= const_cast<supernode
*>(snode
)->start_phis ();
5649 !gsi_end_p (gpi
); gsi_next (&gpi
))
5651 gphi
*phi
= gpi
.phi ();
5653 tree src
= last_cfg_superedge
->get_phi_arg (phi
);
5654 tree lhs
= gimple_phi_result (phi
);
5656 /* Update next_state based on phi. */
5657 bool is_back_edge
= last_cfg_superedge
->back_edge_p ();
5658 handle_phi (phi
, lhs
, src
, is_back_edge
, ctxt
);
5662 /* Attempt to update this model for taking EDGE (where the last statement
5663 was LAST_STMT), returning true if the edge can be taken, false
5666 For CFG superedges where LAST_STMT is a conditional or a switch
5667 statement, attempt to add the relevant conditions for EDGE to this
5668 model, returning true if they are feasible, or false if they are
5671 For call superedges, push frame information and store arguments
5674 For return superedges, pop frame information and store return
5675 values into any lhs.
5677 Rejection of call/return superedges happens elsewhere, in
5678 program_point::on_edge (i.e. based on program point, rather
5679 than program state). */
5682 region_model::maybe_update_for_edge (const superedge
&edge
,
5683 const gimple
*last_stmt
,
5684 region_model_context
*ctxt
)
5686 /* Handle frame updates for interprocedural edges. */
5687 switch (edge
.m_kind
)
5692 case SUPEREDGE_CALL
:
5694 const call_superedge
*call_edge
= as_a
<const call_superedge
*> (&edge
);
5695 update_for_call_superedge (*call_edge
, ctxt
);
5699 case SUPEREDGE_RETURN
:
5701 const return_superedge
*return_edge
5702 = as_a
<const return_superedge
*> (&edge
);
5703 update_for_return_superedge (*return_edge
, ctxt
);
5707 case SUPEREDGE_INTRAPROCEDURAL_CALL
:
5709 const callgraph_superedge
*cg_sedge
5710 = as_a
<const callgraph_superedge
*> (&edge
);
5711 update_for_call_summary (*cg_sedge
, ctxt
);
5716 if (last_stmt
== NULL
)
5719 /* Apply any constraints for conditionals/switch statements. */
5721 if (const gcond
*cond_stmt
= dyn_cast
<const gcond
*> (last_stmt
))
5723 const cfg_superedge
*cfg_sedge
= as_a
<const cfg_superedge
*> (&edge
);
5724 return apply_constraints_for_gcond (*cfg_sedge
, cond_stmt
, ctxt
);
5727 if (const gswitch
*switch_stmt
= dyn_cast
<const gswitch
*> (last_stmt
))
5729 const switch_cfg_superedge
*switch_sedge
5730 = as_a
<const switch_cfg_superedge
*> (&edge
);
5731 return apply_constraints_for_gswitch (*switch_sedge
, switch_stmt
, ctxt
);
5737 /* Push a new frame_region on to the stack region.
5738 Populate the frame_region with child regions for the function call's
5739 parameters, using values from the arguments at the callsite in the
5743 region_model::update_for_call_superedge (const call_superedge
&call_edge
,
5744 region_model_context
*ctxt
)
5746 /* Build a vec of argument svalue_id, using the current top
5747 frame for resolving tree expressions. */
5748 const gcall
*call_stmt
= call_edge
.get_call_stmt ();
5749 auto_vec
<svalue_id
> arg_sids (gimple_call_num_args (call_stmt
));
5751 for (unsigned i
= 0; i
< gimple_call_num_args (call_stmt
); i
++)
5753 tree arg
= gimple_call_arg (call_stmt
, i
);
5754 arg_sids
.quick_push (get_rvalue (arg
, ctxt
));
5757 push_frame (call_edge
.get_callee_function (), &arg_sids
, ctxt
);
5760 /* Pop the top-most frame_region from the stack, and store the svalue
5761 for any returned value into the region for the lvalue of the LHS of
5762 the call (if any). */
5765 region_model::update_for_return_superedge (const return_superedge
&return_edge
,
5766 region_model_context
*ctxt
)
5769 svalue_id result_sid
= pop_frame (true, &stats
, ctxt
);
5770 // TODO: do something with the stats?
5772 if (result_sid
.null_p ())
5775 /* Set the result of the call, within the caller frame. */
5776 const gcall
*call_stmt
= return_edge
.get_call_stmt ();
5777 tree lhs
= gimple_call_lhs (call_stmt
);
5779 set_value (get_lvalue (lhs
, ctxt
), result_sid
, ctxt
);
5782 /* This could be a leak; try purging again, but this time,
5783 don't special-case the result_sid. */
5785 purge_unused_svalues (&stats
, ctxt
);
5789 /* Update this region_model with a summary of the effect of calling
5790 and returning from CG_SEDGE.
5792 TODO: Currently this is extremely simplistic: we merely set the
5793 return value to "unknown". A proper implementation would e.g. update
5794 sm-state, and presumably be reworked to support multiple outcomes. */
5797 region_model::update_for_call_summary (const callgraph_superedge
&cg_sedge
,
5798 region_model_context
*ctxt
)
5800 /* For now, set any return value to "unknown". */
5801 const gcall
*call_stmt
= cg_sedge
.get_call_stmt ();
5802 tree lhs
= gimple_call_lhs (call_stmt
);
5804 set_to_new_unknown_value (get_lvalue (lhs
, ctxt
), TREE_TYPE (lhs
), ctxt
);
5806 // TODO: actually implement some kind of summary here
5809 /* Given a true or false edge guarded by conditional statement COND_STMT,
5810 determine appropriate constraints for the edge to be taken.
5812 If they are feasible, add the constraints and return true.
5814 Return false if the constraints contradict existing knowledge
5815 (and so the edge should not be taken). */
5818 region_model::apply_constraints_for_gcond (const cfg_superedge
&sedge
,
5819 const gcond
*cond_stmt
,
5820 region_model_context
*ctxt
)
5822 ::edge cfg_edge
= sedge
.get_cfg_edge ();
5823 gcc_assert (cfg_edge
!= NULL
);
5824 gcc_assert (cfg_edge
->flags
& (EDGE_TRUE_VALUE
| EDGE_FALSE_VALUE
));
5826 enum tree_code op
= gimple_cond_code (cond_stmt
);
5827 tree lhs
= gimple_cond_lhs (cond_stmt
);
5828 tree rhs
= gimple_cond_rhs (cond_stmt
);
5829 if (cfg_edge
->flags
& EDGE_FALSE_VALUE
)
5830 op
= invert_tree_comparison (op
, false /* honor_nans */);
5831 return add_constraint (lhs
, op
, rhs
, ctxt
);
5834 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5835 for the edge to be taken.
5837 If they are feasible, add the constraints and return true.
5839 Return false if the constraints contradict existing knowledge
5840 (and so the edge should not be taken). */
5843 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge
&edge
,
5844 const gswitch
*switch_stmt
,
5845 region_model_context
*ctxt
)
5847 tree index
= gimple_switch_index (switch_stmt
);
5848 tree case_label
= edge
.get_case_label ();
5849 gcc_assert (TREE_CODE (case_label
) == CASE_LABEL_EXPR
);
5850 tree lower_bound
= CASE_LOW (case_label
);
5851 tree upper_bound
= CASE_HIGH (case_label
);
5857 if (!add_constraint (index
, GE_EXPR
, lower_bound
, ctxt
))
5859 return add_constraint (index
, LE_EXPR
, upper_bound
, ctxt
);
5863 return add_constraint (index
, EQ_EXPR
, lower_bound
, ctxt
);
5867 /* The default case.
5868 Add exclusions based on the other cases. */
5869 for (unsigned other_idx
= 1;
5870 other_idx
< gimple_switch_num_labels (switch_stmt
);
5873 tree other_label
= gimple_switch_label (switch_stmt
,
5875 tree other_lower_bound
= CASE_LOW (other_label
);
5876 tree other_upper_bound
= CASE_HIGH (other_label
);
5877 gcc_assert (other_lower_bound
);
5878 if (other_upper_bound
)
5880 /* Exclude this range-valued case.
5881 For now, we just exclude the boundary values.
5882 TODO: exclude the values within the region. */
5883 if (!add_constraint (index
, NE_EXPR
, other_lower_bound
, ctxt
))
5885 if (!add_constraint (index
, NE_EXPR
, other_upper_bound
, ctxt
))
5889 /* Exclude this single-valued case. */
5890 if (!add_constraint (index
, NE_EXPR
, other_lower_bound
, ctxt
))
5897 /* Get the root_region within this model (guaranteed to be non-null). */
5900 region_model::get_root_region () const
5902 return get_region
<root_region
> (m_root_rid
);
5905 /* Get the region_id of this model's stack region (if any). */
5908 region_model::get_stack_region_id () const
5910 return get_root_region ()->get_stack_region_id ();
5913 /* Create a new frame_region for a call to FUN and push it onto
5916 If ARG_SIDS is non-NULL, use it to populate the parameters
5918 Otherwise, populate them with unknown values.
5920 Return the region_id of the new frame_region. */
5923 region_model::push_frame (function
*fun
, vec
<svalue_id
> *arg_sids
,
5924 region_model_context
*ctxt
)
5926 return get_root_region ()->push_frame (this, fun
, arg_sids
, ctxt
);
5929 /* Get the region_id of the top-most frame in this region_model's stack,
5933 region_model::get_current_frame_id () const
5935 return get_root_region ()->get_current_frame_id (*this);
5938 /* Get the function of the top-most frame in this region_model's stack.
5939 There must be such a frame. */
5942 region_model::get_current_function () const
5944 region_id frame_id
= get_current_frame_id ();
5945 frame_region
*frame
= get_region
<frame_region
> (frame_id
);
5946 return frame
->get_function ();
5949 /* Pop the topmost frame_region from this region_model's stack;
5950 see the comment for stack_region::pop_frame. */
5953 region_model::pop_frame (bool purge
, purge_stats
*out
,
5954 region_model_context
*ctxt
)
5956 return get_root_region ()->pop_frame (this, purge
, out
, ctxt
);
5959 /* Get the number of frames in this region_model's stack. */
5962 region_model::get_stack_depth () const
5964 stack_region
*stack
= get_root_region ()->get_stack_region (this);
5966 return stack
->get_num_frames ();
5971 /* Get the function * at DEPTH within the call stack. */
5974 region_model::get_function_at_depth (unsigned depth
) const
5976 stack_region
*stack
= get_root_region ()->get_stack_region (this);
5978 region_id frame_rid
= stack
->get_frame_rid (depth
);
5979 frame_region
*frame
= get_region
<frame_region
> (frame_rid
);
5980 return frame
->get_function ();
5983 /* Get the region_id of this model's globals region (if any). */
5986 region_model::get_globals_region_id () const
5988 return get_root_region ()->get_globals_region_id ();
5991 /* Add SVAL to this model, taking ownership, and returning its new
5995 region_model::add_svalue (svalue
*sval
)
5998 m_svalues
.safe_push (sval
);
5999 return svalue_id::from_int (m_svalues
.length () - 1);
6002 /* Change the meaning of SID to be NEW_SVAL
6003 (e.g. when deferencing an unknown pointer, the pointer
6004 becomes a pointer to a symbolic region, so that all users
6005 of the former unknown pointer are now effectively pointing
6006 at the same region). */
6009 region_model::replace_svalue (svalue_id sid
, svalue
*new_sval
)
6011 gcc_assert (!sid
.null_p ());
6012 int idx
= sid
.as_int ();
6014 gcc_assert (m_svalues
[idx
]);
6015 gcc_assert (m_svalues
[idx
]->get_type () == new_sval
->get_type ());
6016 delete m_svalues
[idx
];
6018 m_svalues
[idx
] = new_sval
;
6021 /* Add region R to this model, taking ownership, and returning its new
6025 region_model::add_region (region
*r
)
6028 m_regions
.safe_push (r
);
6029 return region_id::from_int (m_regions
.length () - 1);
6032 /* Return the svalue with id SVAL_ID, or NULL for a null id. */
6035 region_model::get_svalue (svalue_id sval_id
) const
6037 if (sval_id
.null_p ())
6039 return m_svalues
[sval_id
.as_int ()];
6042 /* Return the region with id RID, or NULL for a null id. */
6045 region_model::get_region (region_id rid
) const
6049 return m_regions
[rid
.as_int ()];
6052 /* Make a region of an appropriate subclass for TYPE,
6053 with parent PARENT_RID. */
6056 make_region_for_type (region_id parent_rid
, tree type
)
6058 gcc_assert (TYPE_P (type
));
6060 if (INTEGRAL_TYPE_P (type
)
6061 || SCALAR_FLOAT_TYPE_P (type
)
6062 || POINTER_TYPE_P (type
)
6063 || TREE_CODE (type
) == COMPLEX_TYPE
6064 || TREE_CODE (type
) == VECTOR_TYPE
)
6065 return new primitive_region (parent_rid
, type
);
6067 if (TREE_CODE (type
) == RECORD_TYPE
)
6068 return new struct_region (parent_rid
, type
);
6070 if (TREE_CODE (type
) == ARRAY_TYPE
)
6071 return new array_region (parent_rid
, type
);
6073 if (TREE_CODE (type
) == UNION_TYPE
)
6074 return new union_region (parent_rid
, type
);
6076 if (FUNC_OR_METHOD_TYPE_P (type
))
6077 return new function_region (parent_rid
, type
);
6079 /* If we have a void *, make a new symbolic region. */
6080 if (VOID_TYPE_P (type
))
6081 return new symbolic_region (parent_rid
, type
, false);
6086 /* Add a region with type TYPE and parent PARENT_RID. */
6089 region_model::add_region_for_type (region_id parent_rid
, tree type
)
6091 gcc_assert (TYPE_P (type
));
6093 region
*new_region
= make_region_for_type (parent_rid
, type
);
6094 return add_region (new_region
);
6097 /* Helper class for region_model::purge_unused_svalues. */
6099 class restrict_to_used_svalues
: public purge_criteria
6102 restrict_to_used_svalues (const auto_sbitmap
&used
) : m_used (used
) {}
6104 bool should_purge_p (svalue_id sid
) const FINAL OVERRIDE
6106 gcc_assert (!sid
.null_p ());
6107 return !bitmap_bit_p (m_used
, sid
.as_int ());
6111 const auto_sbitmap
&m_used
;
6114 /* Remove unused svalues from this model, accumulating stats into STATS.
6115 Unused svalues are deleted. Doing so could reorder the svalues, and
6116 thus change the meaning of svalue_ids.
6118 If CTXT is non-NULL, then it is notified about svalue_id remappings,
6119 and about svalue_ids that are about to be deleted. This allows e.g.
6120 for warning about resource leaks, for the case where the svalue
6121 represents a resource handle in the user code (e.g. a FILE * or a malloc
6124 Amongst other things, removing unused svalues is important for ensuring
6125 that the analysis of loops terminates. Otherwise, we could generate a
6126 succession of models with unreferenced "unknown" values, where the
6127 number of redundant unknown values could grow without bounds, and each
6128 such model would be treated as distinct.
6130 If KNOWN_USED is non-NULL, treat *KNOWN_USED as used (this is for
6131 handling values being returned from functions as their frame is popped,
6132 since otherwise we'd have to simultaneously determine both the rvalue
6133 of the return expr in the callee frame and the lvalue for the gcall's
6134 assignment in the caller frame, and it seems cleaner to express all
6135 lvalue and rvalue lookups implicitly relative to a "current" frame). */
6138 region_model::purge_unused_svalues (purge_stats
*stats
,
6139 region_model_context
*ctxt
,
6140 svalue_id
*known_used_sid
)
6142 // TODO: might want to avoid a vfunc call just to do logging here:
6143 logger
*logger
= ctxt
? ctxt
->get_logger () : NULL
;
6147 auto_sbitmap
used (m_svalues
.length ());
6148 bitmap_clear (used
);
6151 if (!known_used_sid
->null_p ())
6152 bitmap_set_bit (used
, known_used_sid
->as_int ());
6154 /* Walk the regions, marking sids that are used. */
6157 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6159 svalue_id sid
= r
->get_value_direct ();
6161 bitmap_set_bit (used
, sid
.as_int ());
6164 /* Now purge any constraints involving svalues we don't care about. */
6165 restrict_to_used_svalues
criterion (used
);
6166 m_constraints
->purge (criterion
, stats
);
6168 /* Mark any sids that are in constraints that survived. */
6171 FOR_EACH_VEC_ELT (m_constraints
->m_equiv_classes
, i
, ec
)
6175 FOR_EACH_VEC_ELT (ec
->m_vars
, j
, sid
)
6177 gcc_assert (!sid
->null_p ());
6178 bitmap_set_bit (used
, sid
->as_int ());
6183 /* Build a mapping from old-sid to new-sid so that we can preserve
6184 order of the used IDs and move all redundant ones to the end.
6185 Iterate though svalue IDs, adding used ones to the front of
6186 the new list, and unused ones to the back. */
6187 svalue_id_map
map (m_svalues
.length ());
6188 int next_used_new_sid
= 0;
6189 int after_next_unused_new_sid
= m_svalues
.length ();
6190 for (unsigned i
= 0; i
< m_svalues
.length (); i
++)
6192 svalue_id
src (svalue_id::from_int (i
));
6193 if (bitmap_bit_p (used
, i
))
6196 logger
->log ("sv%i is used", i
);
6197 map
.put (src
, svalue_id::from_int (next_used_new_sid
++));
6202 logger
->log ("sv%i is unused", i
);
6203 map
.put (src
, svalue_id::from_int (--after_next_unused_new_sid
));
6206 /* The two insertion points should have met. */
6207 gcc_assert (next_used_new_sid
== after_next_unused_new_sid
);
6209 /* Now walk the regions and the constraints, remapping sids,
6210 so that all the redundant svalues are at the end. */
6211 remap_svalue_ids (map
);
6215 logger
->start_log_line ();
6216 logger
->log_partial ("map: ");
6217 map
.dump_to_pp (logger
->get_printer ());
6218 logger
->end_log_line ();
6221 /* Notify any client about the remapping and pending deletion.
6222 Potentially this could trigger leak warnings. */
6225 ctxt
->remap_svalue_ids (map
);
6226 int num_client_items_purged
6227 = ctxt
->on_svalue_purge (svalue_id::from_int (next_used_new_sid
), map
);
6229 stats
->m_num_client_items
+= num_client_items_purged
;
6232 /* Drop the redundant svalues from the end of the vector. */
6233 while ((signed)m_svalues
.length () > next_used_new_sid
)
6237 svalue_id victim
= svalue_id::from_int (m_svalues
.length () - 1);
6238 logger
->log ("deleting sv%i (was sv%i)",
6240 map
.get_src_for_dst (victim
).as_int ());
6242 delete m_svalues
.pop ();
6244 stats
->m_num_svalues
++;
6248 map
.update (known_used_sid
);
6253 /* Renumber the svalues within this model according to MAP. */
6256 region_model::remap_svalue_ids (const svalue_id_map
&map
)
6258 /* Update IDs within regions. */
6261 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6262 r
->remap_svalue_ids (map
);
6264 /* Update IDs within ECs within constraints. */
6265 m_constraints
->remap_svalue_ids (map
);
6267 /* Build a reordered svalues vector. */
6268 auto_vec
<svalue
*> new_svalues (m_svalues
.length ());
6269 for (unsigned i
= 0; i
< m_svalues
.length (); i
++)
6271 svalue_id
dst (svalue_id::from_int (i
));
6272 svalue_id src
= map
.get_src_for_dst (dst
);
6273 new_svalues
.quick_push (get_svalue (src
));
6276 /* Copy over the reordered vec to m_svalues. */
6277 m_svalues
.truncate (0);
6278 gcc_assert (m_svalues
.space (new_svalues
.length ()));
6280 FOR_EACH_VEC_ELT (new_svalues
, i
, sval
)
6281 m_svalues
.quick_push (sval
);
6284 /* Renumber the regions within this model according to MAP. */
6287 region_model::remap_region_ids (const region_id_map
&map
)
6289 /* Update IDs within regions. */
6292 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6293 r
->remap_region_ids (map
);
6295 /* Update IDs within svalues. */
6297 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
6298 sval
->remap_region_ids (map
);
6300 /* Build a reordered regions vector. */
6301 auto_vec
<region
*> new_regions (m_regions
.length ());
6302 for (unsigned i
= 0; i
< m_regions
.length (); i
++)
6304 region_id
dst (region_id::from_int (i
));
6305 region_id src
= map
.get_src_for_dst (dst
);
6306 new_regions
.quick_push (get_region (src
));
6309 /* Copy over the reordered vec to m_regions. */
6310 m_regions
.truncate (0);
6311 gcc_assert (m_regions
.space (new_regions
.length ()));
6312 FOR_EACH_VEC_ELT (new_regions
, i
, r
)
6313 m_regions
.quick_push (r
);
6316 /* Delete all regions within SET_TO_PURGE, remapping region IDs for
6317 other regions. It's required that there are no uses of the
6318 regions within the set (or the region IDs will become invalid).
6320 Accumulate stats to STATS. */
6323 region_model::purge_regions (const region_id_set
&set_to_purge
,
6327 /* Build a mapping from old-rid to new-rid so that we can preserve
6328 order of the used IDs and move all redundant ones to the end.
6329 Iterate though region IDs, adding used ones to the front of
6330 the new list, and unused ones to the back. */
6331 region_id_map
map (m_regions
.length ());
6332 int next_used_new_rid
= 0;
6333 int after_next_unused_new_rid
= m_regions
.length ();
6334 for (unsigned i
= 0; i
< m_regions
.length (); i
++)
6336 region_id
src (region_id::from_int (i
));
6337 if (set_to_purge
.region_p (src
))
6338 map
.put (src
, region_id::from_int (--after_next_unused_new_rid
));
6340 map
.put (src
, region_id::from_int (next_used_new_rid
++));
6342 /* The two insertion points should have met. */
6343 gcc_assert (next_used_new_rid
== after_next_unused_new_rid
);
6345 /* Now walk the regions and svalues, remapping rids,
6346 so that all the redundant regions are at the end. */
6347 remap_region_ids (map
);
6349 /* Drop the redundant regions from the end of the vector. */
6350 while ((signed)m_regions
.length () > next_used_new_rid
)
6352 delete m_regions
.pop ();
6354 stats
->m_num_regions
++;
6358 /* Populate *OUT with RID and all of its descendents.
6359 If EXCLUDE_RID is non-null, then don't add it or its descendents. */
6362 region_model::get_descendents (region_id rid
, region_id_set
*out
,
6363 region_id exclude_rid
) const
6365 out
->add_region (rid
);
6367 bool changed
= true;
6373 FOR_EACH_VEC_ELT (m_regions
, i
, r
)
6375 region_id iter_rid
= region_id::from_int (i
);
6376 if (iter_rid
== exclude_rid
)
6378 if (!out
->region_p (iter_rid
))
6380 region_id parent_rid
= r
->get_parent ();
6381 if (!parent_rid
.null_p ())
6382 if (out
->region_p (parent_rid
))
6384 out
->add_region (iter_rid
);
6392 /* Delete RID and all descendent regions.
6393 Find any pointers to such regions; convert convert them to
6394 poisoned values of kind PKIND.
6395 Accumulate stats on purged entities into STATS. */
6398 region_model::delete_region_and_descendents (region_id rid
,
6399 enum poison_kind pkind
,
6403 /* Find all child and descendent regions. */
6404 region_id_set
descendents (this);
6405 get_descendents (rid
, &descendents
, region_id::null ());
6407 /* Find any pointers to such regions; convert to poisoned. */
6408 poison_any_pointers_to_bad_regions (descendents
, pkind
);
6410 /* Delete all such regions. */
6411 purge_regions (descendents
, stats
, logger
);
6414 /* Find any pointers to regions within BAD_REGIONS; convert them to
6415 poisoned values of kind PKIND. */
6418 region_model::poison_any_pointers_to_bad_regions (const region_id_set
&
6420 enum poison_kind pkind
)
6424 FOR_EACH_VEC_ELT (m_svalues
, i
, sval
)
6425 if (region_svalue
*ptr_sval
= sval
->dyn_cast_region_svalue ())
6427 region_id ptr_dst
= ptr_sval
->get_pointee ();
6428 if (!ptr_dst
.null_p ())
6429 if (bad_regions
.region_p (ptr_dst
))
6431 (svalue_id::from_int (i
),
6432 new poisoned_svalue (pkind
, sval
->get_type ()));
6436 /* Attempt to merge THIS with OTHER_MODEL, writing the result
6437 to OUT_MODEL, and populating SID_MAPPING. */
6440 region_model::can_merge_with_p (const region_model
&other_model
,
6441 region_model
*out_model
,
6442 svalue_id_merger_mapping
*sid_mapping
) const
6444 gcc_assert (m_root_rid
== other_model
.m_root_rid
);
6445 gcc_assert (m_root_rid
.as_int () == 0);
6446 gcc_assert (sid_mapping
);
6447 gcc_assert (out_model
);
6449 model_merger
merger (this, &other_model
, out_model
, sid_mapping
);
6451 if (!root_region::can_merge_p (get_root_region (),
6452 other_model
.get_root_region (),
6453 out_model
->get_root_region (),
6457 /* Merge constraints. */
6458 constraint_manager::merge (*m_constraints
,
6459 *other_model
.m_constraints
,
6460 out_model
->m_constraints
,
6463 out_model
->validate ();
6465 /* The merged model should be simpler (or as simple) as the inputs. */
6467 gcc_assert (out_model
->m_svalues
.length () <= m_svalues
.length ());
6468 gcc_assert (out_model
->m_svalues
.length ()
6469 <= other_model
.m_svalues
.length ());
6471 gcc_assert (out_model
->m_regions
.length () <= m_regions
.length ());
6472 gcc_assert (out_model
->m_regions
.length ()
6473 <= other_model
.m_regions
.length ());
6474 // TODO: same, for constraints
6479 /* As above, but supply a placeholder svalue_id_merger_mapping
6480 instance to be used and receive output. For use in selftests. */
6483 region_model::can_merge_with_p (const region_model
&other_model
,
6484 region_model
*out_model
) const
6486 svalue_id_merger_mapping
sid_mapping (*this, other_model
);
6487 return can_merge_with_p (other_model
, out_model
, &sid_mapping
);
6490 /* For debugging purposes: look for a region within this region_model
6491 for a decl named NAME (or an SSA_NAME for such a decl),
6492 returning its value, or svalue_id::null if none are found. */
6495 region_model::get_value_by_name (const char *name
) const
6498 tree identifier
= get_identifier (name
);
6499 return get_root_region ()->get_value_by_name (identifier
, *this);
6502 /* Generate or reuse an svalue_id within this model for an index
6503 into an array of type PTR_TYPE, based on OFFSET_SID. */
6506 region_model::convert_byte_offset_to_array_index (tree ptr_type
,
6507 svalue_id offset_sid
)
6509 gcc_assert (POINTER_TYPE_P (ptr_type
));
6511 if (tree offset_cst
= maybe_get_constant (offset_sid
))
6513 tree elem_type
= TREE_TYPE (ptr_type
);
6515 /* Arithmetic on void-pointers is a GNU C extension, treating the size
6517 https://gcc.gnu.org/onlinedocs/gcc/Pointer-Arith.html
6519 Returning early for this case avoids a diagnostic from within the
6520 call to size_in_bytes. */
6521 if (TREE_CODE (elem_type
) == VOID_TYPE
)
6524 /* This might not be a constant. */
6525 tree byte_size
= size_in_bytes (elem_type
);
6527 /* Try to get a constant by dividing, ensuring that we're in a
6528 signed representation first. */
6530 = fold_binary (TRUNC_DIV_EXPR
, ssizetype
,
6531 fold_convert (ssizetype
, offset_cst
),
6532 fold_convert (ssizetype
, byte_size
));
6533 if (index
&& TREE_CODE (index
) == INTEGER_CST
)
6534 return get_or_create_constant_svalue (index
);
6537 /* Otherwise, we don't know the array index; generate a new unknown value.
6538 TODO: do we need to capture the relationship between two unknown
6539 values (the offset and the index)? */
6540 return add_svalue (new unknown_svalue (integer_type_node
));
6543 /* Get a region of type TYPE for PTR_SID[OFFSET_SID/sizeof (*PTR_SID)].
6545 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6546 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6547 and then get a view of type TYPE on the relevant array element. */
6550 region_model::get_or_create_mem_ref (tree type
,
6552 svalue_id offset_sid
,
6553 region_model_context
*ctxt
)
6555 svalue
*ptr_sval
= get_svalue (ptr_sid
);
6556 tree ptr_type
= ptr_sval
->get_type ();
6557 gcc_assert (ptr_type
);
6559 region_id raw_rid
= deref_rvalue (ptr_sid
, ctxt
);
6561 svalue
*offset_sval
= get_svalue (offset_sid
);
6562 tree offset_type
= offset_sval
->get_type ();
6563 gcc_assert (offset_type
);
6565 if (constant_svalue
*cst_sval
= offset_sval
->dyn_cast_constant_svalue ())
6567 if (zerop (cst_sval
->get_constant ()))
6569 /* Handle the zero offset case. */
6570 return get_or_create_view (raw_rid
, type
);
6573 /* If we're already within an array of the correct type,
6574 then we want to reuse that array, rather than starting
6576 If so, figure out our raw_rid's offset from its parent,
6577 if we can, and use that to offset OFFSET_SID, and create
6578 the element within the parent region. */
6579 region
*raw_reg
= get_region (raw_rid
);
6580 region_id parent_rid
= raw_reg
->get_parent ();
6581 tree parent_type
= get_region (parent_rid
)->get_type ();
6583 && TREE_CODE (parent_type
) == ARRAY_TYPE
)
6585 // TODO: check we have the correct parent type
6586 array_region
*parent_array
= get_region
<array_region
> (parent_rid
);
6587 array_region::key_t key_for_raw_rid
;
6588 if (parent_array
->get_key_for_child_region (raw_rid
,
6591 /* Convert from offset to index. */
6593 = convert_byte_offset_to_array_index (ptr_type
, offset_sid
);
6595 = get_svalue (index_sid
)->maybe_get_constant ())
6597 array_region::key_t index_offset
6598 = array_region::key_from_constant (index_cst
);
6599 array_region::key_t index_rel_to_parent
6600 = key_for_raw_rid
+ index_offset
;
6601 tree index_rel_to_parent_cst
6602 = wide_int_to_tree (integer_type_node
,
6603 index_rel_to_parent
);
6605 = get_or_create_constant_svalue (index_rel_to_parent_cst
);
6607 /* Carry on, using the parent region and adjusted index. */
6608 region_id element_rid
6609 = parent_array
->get_element (this, raw_rid
, index_sid
,
6611 return get_or_create_view (element_rid
, type
);
6617 tree array_type
= build_array_type (TREE_TYPE (ptr_type
),
6619 region_id array_view_rid
= get_or_create_view (raw_rid
, array_type
);
6620 array_region
*array_reg
= get_region
<array_region
> (array_view_rid
);
6623 = convert_byte_offset_to_array_index (ptr_type
, offset_sid
);
6625 region_id element_rid
6626 = array_reg
->get_element (this, array_view_rid
, index_sid
, ctxt
);
6628 return get_or_create_view (element_rid
, type
);
6631 /* Get a region of type TYPE for PTR_SID + OFFSET_SID.
6633 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6634 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6635 and then get a view of type TYPE on the relevant array element. */
6638 region_model::get_or_create_pointer_plus_expr (tree type
,
6640 svalue_id offset_in_bytes_sid
,
6641 region_model_context
*ctxt
)
6643 return get_or_create_mem_ref (type
,
6645 offset_in_bytes_sid
,
6649 /* Get or create a view of type TYPE of the region with id RAW_ID.
6650 Return the id of the view (or RAW_ID if it of the same type). */
6653 region_model::get_or_create_view (region_id raw_rid
, tree type
)
6655 region
*raw_region
= get_region (raw_rid
);
6657 gcc_assert (TYPE_P (type
));
6658 if (type
!= raw_region
->get_type ())
6660 /* If the region already has a view of the requested type,
6662 region_id existing_view_rid
= raw_region
->get_view (type
, this);
6663 if (!existing_view_rid
.null_p ())
6664 return existing_view_rid
;
6666 /* Otherwise, make one (adding it to the region_model and
6667 to the viewed region). */
6668 region_id view_rid
= add_region_for_type (raw_rid
, type
);
6669 raw_region
->add_view (view_rid
, this);
6670 // TODO: something to signify that this is a "view"
6677 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6681 region_model::get_fndecl_for_call (const gcall
*call
,
6682 region_model_context
*ctxt
)
6684 tree fn_ptr
= gimple_call_fn (call
);
6685 if (fn_ptr
== NULL_TREE
)
6687 svalue_id fn_ptr_sid
= get_rvalue (fn_ptr
, ctxt
);
6688 svalue
*fn_ptr_sval
= get_svalue (fn_ptr_sid
);
6689 if (region_svalue
*fn_ptr_ptr
= fn_ptr_sval
->dyn_cast_region_svalue ())
6691 region_id fn_rid
= fn_ptr_ptr
->get_pointee ();
6692 code_region
*code
= get_root_region ()->get_code_region (this);
6695 tree fn_decl
= code
->get_tree_for_child_region (fn_rid
);
6696 const cgraph_node
*ultimate_node
6697 = cgraph_node::get (fn_decl
)->ultimate_alias_target ();
6699 return ultimate_node
->decl
;
6706 /* struct model_merger. */
6708 /* Dump a multiline representation of this merger to PP. */
6711 model_merger::dump_to_pp (pretty_printer
*pp
) const
6713 pp_string (pp
, "model A:");
6715 m_model_a
->dump_to_pp (pp
, false);
6718 pp_string (pp
, "model B:");
6720 m_model_b
->dump_to_pp (pp
, false);
6723 pp_string (pp
, "merged model:");
6725 m_merged_model
->dump_to_pp (pp
, false);
6728 pp_string (pp
, "region map: model A to merged model:");
6730 m_map_regions_from_a_to_m
.dump_to_pp (pp
);
6733 pp_string (pp
, "region map: model B to merged model:");
6735 m_map_regions_from_b_to_m
.dump_to_pp (pp
);
6738 m_sid_mapping
->dump_to_pp (pp
);
6741 /* Dump a multiline representation of this merger to FILE. */
6744 model_merger::dump (FILE *fp
) const
6747 pp_format_decoder (&pp
) = default_tree_printer
;
6748 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
6749 pp
.buffer
->stream
= fp
;
6754 /* Dump a multiline representation of this merger to stderr. */
6757 model_merger::dump () const
6762 /* Attempt to merge the svalues of SID_A and SID_B (from their
6763 respective models), writing the id of the resulting svalue
6765 Return true if the merger is possible, false otherwise. */
6768 model_merger::can_merge_values_p (svalue_id sid_a
,
6770 svalue_id
*merged_sid
)
6772 gcc_assert (merged_sid
);
6773 svalue
*sval_a
= m_model_a
->get_svalue (sid_a
);
6774 svalue
*sval_b
= m_model_b
->get_svalue (sid_b
);
6776 /* If both are NULL, then the "values" are trivially mergeable. */
6777 if (!sval_a
&& !sval_b
)
6780 /* If one is NULL and the other non-NULL, then the "values"
6781 are not mergeable. */
6782 if (!(sval_a
&& sval_b
))
6785 /* Have they both already been mapped to the same new svalue_id?
6787 svalue_id sid_a_in_m
6788 = m_sid_mapping
->m_map_from_a_to_m
.get_dst_for_src (sid_a
);
6789 svalue_id sid_b_in_m
6790 = m_sid_mapping
->m_map_from_b_to_m
.get_dst_for_src (sid_b
);
6791 if (!sid_a_in_m
.null_p ()
6792 && !sid_b_in_m
.null_p ()
6793 && sid_a_in_m
== sid_b_in_m
)
6795 *merged_sid
= sid_a_in_m
;
6799 tree type
= sval_a
->get_type ();
6800 if (type
== NULL_TREE
)
6801 type
= sval_b
->get_type ();
6803 /* If the values have different kinds, or are both unknown,
6804 then merge as "unknown". */
6805 if (sval_a
->get_kind () != sval_b
->get_kind ()
6806 || sval_a
->get_kind () == SK_UNKNOWN
)
6808 svalue
*merged_sval
= new unknown_svalue (type
);
6809 *merged_sid
= m_merged_model
->add_svalue (merged_sval
);
6810 record_svalues (sid_a
, sid_b
, *merged_sid
);
6814 gcc_assert (sval_a
->get_kind () == sval_b
->get_kind ());
6816 switch (sval_a
->get_kind ())
6819 case SK_UNKNOWN
: /* SK_UNKNOWN handled above. */
6824 /* If we have two region pointers, then we can merge (possibly to
6826 const region_svalue
®ion_sval_a
= *as_a
<region_svalue
*> (sval_a
);
6827 const region_svalue
®ion_sval_b
= *as_a
<region_svalue
*> (sval_b
);
6828 region_svalue::merge_values (region_sval_a
, region_sval_b
,
6831 record_svalues (sid_a
, sid_b
, *merged_sid
);
6837 /* If we have two constants, then we can merge. */
6838 const constant_svalue
&cst_sval_a
= *as_a
<constant_svalue
*> (sval_a
);
6839 const constant_svalue
&cst_sval_b
= *as_a
<constant_svalue
*> (sval_b
);
6840 constant_svalue::merge_values (cst_sval_a
, cst_sval_b
,
6842 record_svalues (sid_a
, sid_b
, *merged_sid
);
6853 /* Record that A_RID in model A and B_RID in model B
6854 correspond to MERGED_RID in the merged model, so
6855 that pointers can be accurately merged. */
6858 model_merger::record_regions (region_id a_rid
,
6860 region_id merged_rid
)
6862 m_map_regions_from_a_to_m
.put (a_rid
, merged_rid
);
6863 m_map_regions_from_b_to_m
.put (b_rid
, merged_rid
);
6866 /* Record that A_SID in model A and B_SID in model B
6867 correspond to MERGED_SID in the merged model. */
6870 model_merger::record_svalues (svalue_id a_sid
,
6872 svalue_id merged_sid
)
6874 gcc_assert (m_sid_mapping
);
6875 m_sid_mapping
->m_map_from_a_to_m
.put (a_sid
, merged_sid
);
6876 m_sid_mapping
->m_map_from_b_to_m
.put (b_sid
, merged_sid
);
6879 /* struct svalue_id_merger_mapping. */
6881 /* svalue_id_merger_mapping's ctor. */
6883 svalue_id_merger_mapping::svalue_id_merger_mapping (const region_model
&a
,
6884 const region_model
&b
)
6885 : m_map_from_a_to_m (a
.get_num_svalues ()),
6886 m_map_from_b_to_m (b
.get_num_svalues ())
6890 /* Dump a multiline representation of this to PP. */
6893 svalue_id_merger_mapping::dump_to_pp (pretty_printer
*pp
) const
6895 pp_string (pp
, "svalue_id map: model A to merged model:");
6897 m_map_from_a_to_m
.dump_to_pp (pp
);
6900 pp_string (pp
, "svalue_id map: model B to merged model:");
6902 m_map_from_b_to_m
.dump_to_pp (pp
);
6906 /* Dump a multiline representation of this to FILE. */
6909 svalue_id_merger_mapping::dump (FILE *fp
) const
6912 pp_format_decoder (&pp
) = default_tree_printer
;
6913 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
6914 pp
.buffer
->stream
= fp
;
6919 /* Dump a multiline representation of this to stderr. */
6922 svalue_id_merger_mapping::dump () const
6927 /* struct canonicalization. */
6929 /* canonicalization's ctor. */
6931 canonicalization::canonicalization (const region_model
&model
)
6933 m_rid_map (model
.get_num_regions ()),
6934 m_sid_map (model
.get_num_svalues ()),
6940 /* If we've not seen RID yet, assign it a canonicalized region_id,
6941 and walk the region's svalue and then the region. */
6944 canonicalization::walk_rid (region_id rid
)
6946 /* Stop if we've already seen RID. */
6947 if (!m_rid_map
.get_dst_for_src (rid
).null_p ())
6950 region
*region
= m_model
.get_region (rid
);
6953 m_rid_map
.put (rid
, region_id::from_int (m_next_rid_int
++));
6954 walk_sid (region
->get_value_direct ());
6955 region
->walk_for_canonicalization (this);
6959 /* If we've not seen SID yet, assign it a canonicalized svalue_id,
6960 and walk the svalue (and potentially regions e.g. for ptr values). */
6963 canonicalization::walk_sid (svalue_id sid
)
6965 /* Stop if we've already seen SID. */
6966 if (!m_sid_map
.get_dst_for_src (sid
).null_p ())
6969 svalue
*sval
= m_model
.get_svalue (sid
);
6972 m_sid_map
.put (sid
, svalue_id::from_int (m_next_sid_int
++));
6973 /* Potentially walk regions e.g. for ptrs. */
6974 sval
->walk_for_canonicalization (this);
6978 /* Dump a multiline representation of this to PP. */
6981 canonicalization::dump_to_pp (pretty_printer
*pp
) const
6983 pp_string (pp
, "region_id map:");
6985 m_rid_map
.dump_to_pp (pp
);
6988 pp_string (pp
, "svalue_id map:");
6990 m_sid_map
.dump_to_pp (pp
);
6994 /* Dump a multiline representation of this to FILE. */
6997 canonicalization::dump (FILE *fp
) const
7000 pp_format_decoder (&pp
) = default_tree_printer
;
7001 pp_show_color (&pp
) = pp_show_color (global_dc
->printer
);
7002 pp
.buffer
->stream
= fp
;
7007 /* Dump a multiline representation of this to stderr. */
7010 canonicalization::dump () const
7017 /* Update HSTATE with a hash of SID. */
7020 inchash::add (svalue_id sid
, inchash::hash
&hstate
)
7022 hstate
.add_int (sid
.as_int ());
7025 /* Update HSTATE with a hash of RID. */
7028 inchash::add (region_id rid
, inchash::hash
&hstate
)
7030 hstate
.add_int (rid
.as_int ());
7033 /* Dump RMODEL fully to stderr (i.e. without summarization). */
7036 debug (const region_model
&rmodel
)
7038 rmodel
.dump (false);
7045 namespace selftest
{
7047 /* Build a constant tree of the given type from STR. */
7050 build_real_cst_from_string (tree type
, const char *str
)
7052 REAL_VALUE_TYPE real
;
7053 real_from_string (&real
, str
);
7054 return build_real (type
, real
);
7057 /* Append various "interesting" constants to OUT (e.g. NaN). */
7060 append_interesting_constants (auto_vec
<tree
> *out
)
7062 out
->safe_push (build_int_cst (integer_type_node
, 0));
7063 out
->safe_push (build_int_cst (integer_type_node
, 42));
7064 out
->safe_push (build_int_cst (unsigned_type_node
, 0));
7065 out
->safe_push (build_int_cst (unsigned_type_node
, 42));
7066 out
->safe_push (build_real_cst_from_string (float_type_node
, "QNaN"));
7067 out
->safe_push (build_real_cst_from_string (float_type_node
, "-QNaN"));
7068 out
->safe_push (build_real_cst_from_string (float_type_node
, "SNaN"));
7069 out
->safe_push (build_real_cst_from_string (float_type_node
, "-SNaN"));
7070 out
->safe_push (build_real_cst_from_string (float_type_node
, "0.0"));
7071 out
->safe_push (build_real_cst_from_string (float_type_node
, "-0.0"));
7072 out
->safe_push (build_real_cst_from_string (float_type_node
, "Inf"));
7073 out
->safe_push (build_real_cst_from_string (float_type_node
, "-Inf"));
7076 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
7077 if the underlying constants aren't comparable. */
7080 test_tree_cmp_on_constants ()
7082 auto_vec
<tree
> csts
;
7083 append_interesting_constants (&csts
);
7085 /* Try sorting every triple. */
7086 const unsigned num
= csts
.length ();
7087 for (unsigned i
= 0; i
< num
; i
++)
7088 for (unsigned j
= 0; j
< num
; j
++)
7089 for (unsigned k
= 0; k
< num
; k
++)
7091 auto_vec
<tree
> v (3);
7092 v
.quick_push (csts
[i
]);
7093 v
.quick_push (csts
[j
]);
7094 v
.quick_push (csts
[k
]);
7099 /* Implementation detail of the ASSERT_CONDITION_* macros. */
7102 assert_condition (const location
&loc
,
7103 region_model
&model
,
7104 tree lhs
, tree_code op
, tree rhs
,
7107 tristate actual
= model
.eval_condition (lhs
, op
, rhs
, NULL
);
7108 ASSERT_EQ_AT (loc
, actual
, expected
);
7111 /* Implementation detail of ASSERT_DUMP_EQ. */
7114 assert_dump_eq (const location
&loc
,
7115 const region_model
&model
,
7117 const char *expected
)
7119 auto_fix_quotes sentinel
;
7121 pp_format_decoder (&pp
) = default_tree_printer
;
7122 model
.dump_to_pp (&pp
, summarize
);
7123 ASSERT_STREQ_AT (loc
, pp_formatted_text (&pp
), expected
);
7126 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7128 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7129 SELFTEST_BEGIN_STMT \
7130 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7133 /* Smoketest for region_model::dump_to_pp. */
7139 model
.get_root_region ()->ensure_stack_region (&model
);
7140 model
.get_root_region ()->ensure_globals_region (&model
);
7141 model
.get_root_region ()->ensure_heap_region (&model
);
7143 ASSERT_DUMP_EQ (model
, false,
7144 "r0: {kind: `root', parent: null, sval: null}\n"
7145 "|-stack: r1: {kind: `stack', parent: r0, sval: sv0}\n"
7146 "| |: sval: sv0: {poisoned: uninit}\n"
7147 "|-globals: r2: {kind: `globals', parent: r0, sval: null, map: {}}\n"
7148 "`-heap: r3: {kind: `heap', parent: r0, sval: sv1}\n"
7149 " |: sval: sv1: {poisoned: uninit}\n"
7151 " sv0: {poisoned: uninit}\n"
7152 " sv1: {poisoned: uninit}\n"
7153 "constraint manager:\n"
7156 ASSERT_DUMP_EQ (model
, true, "");
7159 /* Verify that calling region_model::get_rvalue repeatedly on the same
7160 tree constant retrieves the same svalue_id. */
7163 test_unique_constants ()
7165 tree int_0
= build_int_cst (integer_type_node
, 0);
7166 tree int_42
= build_int_cst (integer_type_node
, 42);
7168 test_region_model_context ctxt
;
7170 ASSERT_EQ (model
.get_rvalue (int_0
, &ctxt
), model
.get_rvalue (int_0
, &ctxt
));
7171 ASSERT_EQ (model
.get_rvalue (int_42
, &ctxt
),
7172 model
.get_rvalue (int_42
, &ctxt
));
7173 ASSERT_NE (model
.get_rvalue (int_0
, &ctxt
), model
.get_rvalue (int_42
, &ctxt
));
7174 ASSERT_EQ (ctxt
.get_num_diagnostics (), 0);
7177 /* Check that operator== and hashing works as expected for the
7178 various svalue subclasses. */
7181 test_svalue_equality ()
7183 tree int_42
= build_int_cst (integer_type_node
, 42);
7184 tree int_0
= build_int_cst (integer_type_node
, 0);
7186 /* Create pairs instances of the various subclasses of svalue,
7187 testing for hash and equality between (this, this) and
7188 (this, other of same subclass). */
7190 = new region_svalue (ptr_type_node
, region_id::from_int (0));
7192 = new region_svalue (ptr_type_node
, region_id::from_int (1));
7194 ASSERT_EQ (ptr_to_r0
->hash (), ptr_to_r0
->hash ());
7195 ASSERT_EQ (*ptr_to_r0
, *ptr_to_r0
);
7197 ASSERT_NE (ptr_to_r0
->hash (), ptr_to_r1
->hash ());
7198 ASSERT_NE (*ptr_to_r0
, *ptr_to_r1
);
7200 svalue
*cst_int_42
= new constant_svalue (int_42
);
7201 svalue
*cst_int_0
= new constant_svalue (int_0
);
7203 ASSERT_EQ (cst_int_42
->hash (), cst_int_42
->hash ());
7204 ASSERT_EQ (*cst_int_42
, *cst_int_42
);
7206 ASSERT_NE (cst_int_42
->hash (), cst_int_0
->hash ());
7207 ASSERT_NE (*cst_int_42
, *cst_int_0
);
7209 svalue
*uninit
= new poisoned_svalue (POISON_KIND_UNINIT
, NULL_TREE
);
7210 svalue
*freed
= new poisoned_svalue (POISON_KIND_FREED
, NULL_TREE
);
7212 ASSERT_EQ (uninit
->hash (), uninit
->hash ());
7213 ASSERT_EQ (*uninit
, *uninit
);
7215 ASSERT_NE (uninit
->hash (), freed
->hash ());
7216 ASSERT_NE (*uninit
, *freed
);
7218 svalue
*unknown_0
= new unknown_svalue (ptr_type_node
);
7219 svalue
*unknown_1
= new unknown_svalue (ptr_type_node
);
7220 ASSERT_EQ (unknown_0
->hash (), unknown_0
->hash ());
7221 ASSERT_EQ (*unknown_0
, *unknown_0
);
7222 ASSERT_EQ (*unknown_1
, *unknown_1
);
7224 /* Comparisons between different kinds of svalue. */
7225 ASSERT_NE (*ptr_to_r0
, *cst_int_42
);
7226 ASSERT_NE (*ptr_to_r0
, *uninit
);
7227 ASSERT_NE (*ptr_to_r0
, *unknown_0
);
7228 ASSERT_NE (*cst_int_42
, *ptr_to_r0
);
7229 ASSERT_NE (*cst_int_42
, *uninit
);
7230 ASSERT_NE (*cst_int_42
, *unknown_0
);
7231 ASSERT_NE (*uninit
, *ptr_to_r0
);
7232 ASSERT_NE (*uninit
, *cst_int_42
);
7233 ASSERT_NE (*uninit
, *unknown_0
);
7234 ASSERT_NE (*unknown_0
, *ptr_to_r0
);
7235 ASSERT_NE (*unknown_0
, *cst_int_42
);
7236 ASSERT_NE (*unknown_0
, *uninit
);
7248 /* Check that operator== and hashing works as expected for the
7249 various region subclasses. */
7252 test_region_equality ()
7255 = new primitive_region (region_id::from_int (3), integer_type_node
);
7257 = new primitive_region (region_id::from_int (4), integer_type_node
);
7259 ASSERT_EQ (*r0
, *r0
);
7260 ASSERT_EQ (r0
->hash (), r0
->hash ());
7261 ASSERT_NE (*r0
, *r1
);
7262 ASSERT_NE (r0
->hash (), r1
->hash ());
7267 // TODO: test coverage for the map within a map_region
7270 /* A subclass of purge_criteria for selftests: purge all svalue_id instances. */
7272 class purge_all_svalue_ids
: public purge_criteria
7275 bool should_purge_p (svalue_id
) const FINAL OVERRIDE
7281 /* A subclass of purge_criteria: purge a specific svalue_id. */
7283 class purge_one_svalue_id
: public purge_criteria
7286 purge_one_svalue_id (svalue_id victim
) : m_victim (victim
) {}
7288 purge_one_svalue_id (region_model model
, tree expr
)
7289 : m_victim (model
.get_rvalue (expr
, NULL
)) {}
7291 bool should_purge_p (svalue_id sid
) const FINAL OVERRIDE
7293 return sid
== m_victim
;
7300 /* Check that constraint_manager::purge works for individual svalue_ids. */
7303 test_purging_by_criteria ()
7305 tree int_42
= build_int_cst (integer_type_node
, 42);
7306 tree int_0
= build_int_cst (integer_type_node
, 0);
7308 tree x
= build_global_decl ("x", integer_type_node
);
7309 tree y
= build_global_decl ("y", integer_type_node
);
7312 region_model model0
;
7313 region_model model1
;
7315 ADD_SAT_CONSTRAINT (model1
, x
, EQ_EXPR
, y
);
7316 ASSERT_NE (model0
, model1
);
7318 purge_stats stats_for_px
;
7319 purge_one_svalue_id
px (model1
, x
);
7320 model1
.get_constraints ()->purge (px
, &stats_for_px
);
7321 ASSERT_EQ (stats_for_px
.m_num_equiv_classes
, 0);
7323 purge_stats stats_for_py
;
7324 purge_one_svalue_id
py (model1
.get_rvalue (y
, NULL
));
7325 model1
.get_constraints ()->purge (py
, &stats_for_py
);
7326 ASSERT_EQ (stats_for_py
.m_num_equiv_classes
, 1);
7328 ASSERT_EQ (*model0
.get_constraints (), *model1
.get_constraints ());
7332 region_model model0
;
7333 region_model model1
;
7335 ADD_SAT_CONSTRAINT (model1
, x
, EQ_EXPR
, int_42
);
7336 ASSERT_NE (model0
, model1
);
7337 ASSERT_CONDITION_TRUE (model1
, x
, EQ_EXPR
, int_42
);
7340 model1
.get_constraints ()->purge (purge_one_svalue_id (model1
, x
), &stats
);
7342 ASSERT_CONDITION_UNKNOWN (model1
, x
, EQ_EXPR
, int_42
);
7346 region_model model0
;
7347 region_model model1
;
7349 ADD_SAT_CONSTRAINT (model1
, x
, GE_EXPR
, int_0
);
7350 ADD_SAT_CONSTRAINT (model1
, x
, LE_EXPR
, int_42
);
7351 ASSERT_NE (model0
, model1
);
7353 ASSERT_CONDITION_TRUE (model1
, x
, GE_EXPR
, int_0
);
7354 ASSERT_CONDITION_TRUE (model1
, x
, LE_EXPR
, int_42
);
7357 model1
.get_constraints ()->purge (purge_one_svalue_id (model1
, x
), &stats
);
7359 ASSERT_CONDITION_UNKNOWN (model1
, x
, GE_EXPR
, int_0
);
7360 ASSERT_CONDITION_UNKNOWN (model1
, x
, LE_EXPR
, int_42
);
7364 region_model model0
;
7365 region_model model1
;
7367 ADD_SAT_CONSTRAINT (model1
, x
, NE_EXPR
, int_42
);
7368 ADD_SAT_CONSTRAINT (model1
, y
, NE_EXPR
, int_0
);
7369 ASSERT_NE (model0
, model1
);
7370 ASSERT_CONDITION_TRUE (model1
, x
, NE_EXPR
, int_42
);
7371 ASSERT_CONDITION_TRUE (model1
, y
, NE_EXPR
, int_0
);
7374 model1
.get_constraints ()->purge (purge_one_svalue_id (model1
, x
), &stats
);
7375 ASSERT_NE (model0
, model1
);
7377 ASSERT_CONDITION_UNKNOWN (model1
, x
, NE_EXPR
, int_42
);
7378 ASSERT_CONDITION_TRUE (model1
, y
, NE_EXPR
, int_0
);
7382 region_model model0
;
7383 region_model model1
;
7385 ADD_SAT_CONSTRAINT (model1
, x
, NE_EXPR
, int_42
);
7386 ADD_SAT_CONSTRAINT (model1
, y
, NE_EXPR
, int_0
);
7387 ASSERT_NE (model0
, model1
);
7388 ASSERT_CONDITION_TRUE (model1
, x
, NE_EXPR
, int_42
);
7389 ASSERT_CONDITION_TRUE (model1
, y
, NE_EXPR
, int_0
);
7392 model1
.get_constraints ()->purge (purge_all_svalue_ids (), &stats
);
7393 ASSERT_CONDITION_UNKNOWN (model1
, x
, NE_EXPR
, int_42
);
7394 ASSERT_CONDITION_UNKNOWN (model1
, y
, NE_EXPR
, int_0
);
7399 /* Test that region_model::purge_unused_svalues works as expected. */
7402 test_purge_unused_svalues ()
7404 tree int_42
= build_int_cst (integer_type_node
, 42);
7405 tree int_0
= build_int_cst (integer_type_node
, 0);
7406 tree x
= build_global_decl ("x", integer_type_node
);
7407 tree y
= build_global_decl ("y", integer_type_node
);
7409 test_region_model_context ctxt
;
7411 model
.set_to_new_unknown_value (model
.get_lvalue (x
, &ctxt
), TREE_TYPE (x
),
7413 model
.set_to_new_unknown_value (model
.get_lvalue (x
, &ctxt
), TREE_TYPE (x
),
7415 model
.set_to_new_unknown_value (model
.get_lvalue (x
, &ctxt
), TREE_TYPE (x
),
7417 model
.add_constraint (x
, NE_EXPR
, int_42
, &ctxt
);
7419 model
.set_value (model
.get_lvalue (x
, &ctxt
),
7420 model
.get_rvalue (int_42
, &ctxt
),
7422 model
.add_constraint (y
, GT_EXPR
, int_0
, &ctxt
);
7424 /* The redundant unknown values should have been purged. */
7426 model
.purge_unused_svalues (&purged
, NULL
);
7427 ASSERT_EQ (purged
.m_num_svalues
, 3);
7429 /* and the redundant constraint on an old, unknown value for x should
7430 have been purged. */
7431 ASSERT_EQ (purged
.m_num_equiv_classes
, 1);
7432 ASSERT_EQ (purged
.m_num_constraints
, 1);
7433 ASSERT_EQ (model
.get_constraints ()->m_constraints
.length (), 2);
7435 /* ...but we should still have x == 42. */
7436 ASSERT_EQ (model
.eval_condition (x
, EQ_EXPR
, int_42
, &ctxt
),
7439 /* ...and we should still have the constraint on y. */
7440 ASSERT_EQ (model
.eval_condition (y
, GT_EXPR
, int_0
, &ctxt
),
7443 ASSERT_EQ (ctxt
.get_num_diagnostics (), 0);
7446 /* Verify that simple assignments work as expected. */
7451 tree int_0
= build_int_cst (integer_type_node
, 0);
7452 tree x
= build_global_decl ("x", integer_type_node
);
7453 tree y
= build_global_decl ("y", integer_type_node
);
7455 /* "x == 0", then use of y, then "y = 0;". */
7457 ADD_SAT_CONSTRAINT (model
, x
, EQ_EXPR
, int_0
);
7458 ASSERT_CONDITION_UNKNOWN (model
, y
, EQ_EXPR
, int_0
);
7459 model
.set_value (model
.get_lvalue (y
, NULL
),
7460 model
.get_rvalue (int_0
, NULL
),
7462 ASSERT_CONDITION_TRUE (model
, y
, EQ_EXPR
, int_0
);
7463 ASSERT_CONDITION_TRUE (model
, y
, EQ_EXPR
, x
);
7465 ASSERT_DUMP_EQ (model
, true, "y: 0, {x}: unknown, x == y");
7468 /* Verify the details of pushing and popping stack frames. */
7471 test_stack_frames ()
7473 tree int_42
= build_int_cst (integer_type_node
, 42);
7474 tree int_10
= build_int_cst (integer_type_node
, 10);
7475 tree int_5
= build_int_cst (integer_type_node
, 5);
7476 tree int_0
= build_int_cst (integer_type_node
, 0);
7478 auto_vec
<tree
> param_types
;
7479 tree parent_fndecl
= make_fndecl (integer_type_node
,
7482 allocate_struct_function (parent_fndecl
, true);
7484 tree child_fndecl
= make_fndecl (integer_type_node
,
7487 allocate_struct_function (child_fndecl
, true);
7489 /* "a" and "b" in the parent frame. */
7490 tree a
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7491 get_identifier ("a"),
7493 tree b
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7494 get_identifier ("b"),
7496 /* "x" and "y" in a child frame. */
7497 tree x
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7498 get_identifier ("x"),
7500 tree y
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7501 get_identifier ("y"),
7505 tree p
= build_global_decl ("p", ptr_type_node
);
7508 tree q
= build_global_decl ("q", ptr_type_node
);
7510 test_region_model_context ctxt
;
7513 /* Push stack frame for "parent_fn". */
7514 region_id parent_frame_rid
7515 = model
.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl
), NULL
, &ctxt
);
7516 ASSERT_EQ (model
.get_current_frame_id (), parent_frame_rid
);
7517 region_id a_in_parent_rid
= model
.get_lvalue (a
, &ctxt
);
7518 model
.set_value (a_in_parent_rid
, model
.get_rvalue (int_42
, &ctxt
), &ctxt
);
7519 model
.set_to_new_unknown_value (model
.get_lvalue (b
, &ctxt
),
7520 integer_type_node
, &ctxt
);
7521 model
.add_constraint (b
, LT_EXPR
, int_10
, &ctxt
);
7522 ASSERT_EQ (model
.eval_condition (b
, LT_EXPR
, int_10
, &ctxt
),
7523 tristate (tristate::TS_TRUE
));
7525 /* Push stack frame for "child_fn". */
7526 region_id child_frame_rid
7527 = model
.push_frame (DECL_STRUCT_FUNCTION (child_fndecl
), NULL
, &ctxt
);
7528 ASSERT_EQ (model
.get_current_frame_id (), child_frame_rid
);
7529 region_id x_in_child_rid
= model
.get_lvalue (x
, &ctxt
);
7530 model
.set_value (x_in_child_rid
, model
.get_rvalue (int_0
, &ctxt
), &ctxt
);
7531 model
.set_to_new_unknown_value (model
.get_lvalue (y
, &ctxt
),
7532 integer_type_node
, &ctxt
);
7533 model
.add_constraint (y
, NE_EXPR
, int_5
, &ctxt
);
7534 ASSERT_EQ (model
.eval_condition (y
, NE_EXPR
, int_5
, &ctxt
),
7535 tristate (tristate::TS_TRUE
));
7537 /* Point a global pointer at a local in the child frame: p = &x. */
7538 region_id p_in_globals_rid
= model
.get_lvalue (p
, &ctxt
);
7539 model
.set_value (p_in_globals_rid
,
7540 model
.get_or_create_ptr_svalue (ptr_type_node
,
7544 /* Point another global pointer at p: q = &p. */
7545 region_id q_in_globals_rid
= model
.get_lvalue (q
, &ctxt
);
7546 model
.set_value (q_in_globals_rid
,
7547 model
.get_or_create_ptr_svalue (ptr_type_node
,
7551 /* Test get_descendents. */
7552 region_id_set
descendents (&model
);
7553 model
.get_descendents (child_frame_rid
, &descendents
, region_id::null ());
7554 ASSERT_TRUE (descendents
.region_p (child_frame_rid
));
7555 ASSERT_TRUE (descendents
.region_p (x_in_child_rid
));
7556 ASSERT_FALSE (descendents
.region_p (a_in_parent_rid
));
7557 ASSERT_EQ (descendents
.num_regions (), 3);
7559 auto_vec
<region_id
> test_vec
;
7560 for (region_id_set::iterator_t iter
= descendents
.begin ();
7561 iter
!= descendents
.end ();
7563 test_vec
.safe_push (*iter
);
7564 gcc_unreachable (); // TODO
7568 ASSERT_DUMP_EQ (model
, true,
7569 "x: 0, {y}: unknown, p: &x, q: &p, b < 10, y != 5");
7571 /* Pop the "child_fn" frame from the stack. */
7573 model
.pop_frame (true, &purged
, &ctxt
);
7575 /* We should have purged the unknown values for x and y. */
7576 ASSERT_EQ (purged
.m_num_svalues
, 2);
7578 /* We should have purged the frame region and the regions for x and y. */
7579 ASSERT_EQ (purged
.m_num_regions
, 3);
7581 /* We should have purged the constraint on y. */
7582 ASSERT_EQ (purged
.m_num_equiv_classes
, 1);
7583 ASSERT_EQ (purged
.m_num_constraints
, 1);
7585 /* Verify that p (which was pointing at the local "x" in the popped
7586 frame) has been poisoned. */
7587 svalue
*new_p_sval
= model
.get_svalue (model
.get_rvalue (p
, &ctxt
));
7588 ASSERT_EQ (new_p_sval
->get_kind (), SK_POISONED
);
7589 ASSERT_EQ (new_p_sval
->dyn_cast_poisoned_svalue ()->get_poison_kind (),
7590 POISON_KIND_POPPED_STACK
);
7592 /* Verify that q still points to p, in spite of the region
7594 svalue
*new_q_sval
= model
.get_svalue (model
.get_rvalue (q
, &ctxt
));
7595 ASSERT_EQ (new_q_sval
->get_kind (), SK_REGION
);
7596 ASSERT_EQ (new_q_sval
->dyn_cast_region_svalue ()->get_pointee (),
7597 model
.get_lvalue (p
, &ctxt
));
7599 /* Verify that top of stack has been updated. */
7600 ASSERT_EQ (model
.get_current_frame_id (), parent_frame_rid
);
7602 /* Verify locals in parent frame. */
7603 /* Verify "a" still has its value. */
7604 svalue
*new_a_sval
= model
.get_svalue (model
.get_rvalue (a
, &ctxt
));
7605 ASSERT_EQ (new_a_sval
->get_kind (), SK_CONSTANT
);
7606 ASSERT_EQ (new_a_sval
->dyn_cast_constant_svalue ()->get_constant (),
7608 /* Verify "b" still has its constraint. */
7609 ASSERT_EQ (model
.eval_condition (b
, LT_EXPR
, int_10
, &ctxt
),
7610 tristate (tristate::TS_TRUE
));
7613 /* Verify that get_representative_path_var works as expected, that
7614 we can map from region ids to parms and back within a recursive call
7618 test_get_representative_path_var ()
7620 auto_vec
<tree
> param_types
;
7621 tree fndecl
= make_fndecl (integer_type_node
,
7624 allocate_struct_function (fndecl
, true);
7627 tree n
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7628 get_identifier ("n"),
7633 /* Push 5 stack frames for "factorial", each with a param */
7634 auto_vec
<region_id
> parm_rids
;
7635 auto_vec
<svalue_id
> parm_sids
;
7636 for (int depth
= 0; depth
< 5; depth
++)
7639 = model
.push_frame (DECL_STRUCT_FUNCTION (fndecl
), NULL
, NULL
);
7640 region_id rid_n
= model
.get_lvalue (path_var (n
, depth
), NULL
);
7641 parm_rids
.safe_push (rid_n
);
7643 ASSERT_EQ (model
.get_region (rid_n
)->get_parent (), frame_rid
);
7646 = model
.set_to_new_unknown_value (rid_n
, integer_type_node
, NULL
);
7647 parm_sids
.safe_push (sid_n
);
7650 /* Verify that we can recognize that the regions are the parms,
7652 for (int depth
= 0; depth
< 5; depth
++)
7654 ASSERT_EQ (model
.get_representative_path_var (parm_rids
[depth
]),
7655 path_var (n
, depth
));
7656 /* ...and that we can lookup lvalues for locals for all frames,
7657 not just the top. */
7658 ASSERT_EQ (model
.get_lvalue (path_var (n
, depth
), NULL
),
7660 /* ...and that we can locate the svalues. */
7661 auto_vec
<path_var
> pvs
;
7662 model
.get_path_vars_for_svalue (parm_sids
[depth
], &pvs
);
7663 ASSERT_EQ (pvs
.length (), 1);
7664 ASSERT_EQ (pvs
[0], path_var (n
, depth
));
7668 /* Verify that the core regions within a region_model are in a consistent
7669 order after canonicalization. */
7672 test_canonicalization_1 ()
7674 region_model model0
;
7675 model0
.get_root_region ()->ensure_stack_region (&model0
);
7676 model0
.get_root_region ()->ensure_globals_region (&model0
);
7678 region_model model1
;
7679 model1
.get_root_region ()->ensure_globals_region (&model1
);
7680 model1
.get_root_region ()->ensure_stack_region (&model1
);
7682 model0
.canonicalize (NULL
);
7683 model1
.canonicalize (NULL
);
7684 ASSERT_EQ (model0
, model1
);
7687 /* Verify that region models for
7691 are equal after canonicalization. */
7694 test_canonicalization_2 ()
7696 tree int_42
= build_int_cst (integer_type_node
, 42);
7697 tree int_113
= build_int_cst (integer_type_node
, 113);
7698 tree x
= build_global_decl ("x", integer_type_node
);
7699 tree y
= build_global_decl ("y", integer_type_node
);
7701 region_model model0
;
7702 model0
.set_value (model0
.get_lvalue (x
, NULL
),
7703 model0
.get_rvalue (int_42
, NULL
),
7705 model0
.set_value (model0
.get_lvalue (y
, NULL
),
7706 model0
.get_rvalue (int_113
, NULL
),
7709 region_model model1
;
7710 model1
.set_value (model1
.get_lvalue (y
, NULL
),
7711 model1
.get_rvalue (int_113
, NULL
),
7713 model1
.set_value (model1
.get_lvalue (x
, NULL
),
7714 model1
.get_rvalue (int_42
, NULL
),
7717 model0
.canonicalize (NULL
);
7718 model1
.canonicalize (NULL
);
7719 ASSERT_EQ (model0
, model1
);
7722 /* Verify that constraints for
7726 are equal after canonicalization. */
7729 test_canonicalization_3 ()
7731 tree int_3
= build_int_cst (integer_type_node
, 3);
7732 tree int_42
= build_int_cst (integer_type_node
, 42);
7733 tree x
= build_global_decl ("x", integer_type_node
);
7734 tree y
= build_global_decl ("y", integer_type_node
);
7736 region_model model0
;
7737 model0
.add_constraint (x
, GT_EXPR
, int_3
, NULL
);
7738 model0
.add_constraint (y
, GT_EXPR
, int_42
, NULL
);
7740 region_model model1
;
7741 model1
.add_constraint (y
, GT_EXPR
, int_42
, NULL
);
7742 model1
.add_constraint (x
, GT_EXPR
, int_3
, NULL
);
7744 model0
.canonicalize (NULL
);
7745 model1
.canonicalize (NULL
);
7746 ASSERT_EQ (model0
, model1
);
7749 /* Verify that we can canonicalize a model containing NaN and other real
7753 test_canonicalization_4 ()
7755 auto_vec
<tree
> csts
;
7756 append_interesting_constants (&csts
);
7762 FOR_EACH_VEC_ELT (csts
, i
, cst
)
7763 model
.get_rvalue (cst
, NULL
);
7765 model
.canonicalize (NULL
);
7768 /* Assert that if we have two region_model instances
7769 with values VAL_A and VAL_B for EXPR that they are
7770 mergable. Write the merged model to *OUT_MERGED_MODEL,
7771 and the merged svalue ptr to *OUT_MERGED_SVALUE.
7772 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
7773 for that region_model. */
7776 assert_region_models_merge (tree expr
, tree val_a
, tree val_b
,
7777 region_model
*out_merged_model
,
7778 svalue
**out_merged_svalue
)
7780 test_region_model_context ctxt
;
7781 region_model model0
;
7782 region_model model1
;
7784 model0
.set_value (model0
.get_lvalue (expr
, &ctxt
),
7785 model0
.get_rvalue (val_a
, &ctxt
),
7788 model1
.set_value (model1
.get_lvalue (expr
, &ctxt
),
7789 model1
.get_rvalue (val_b
, &ctxt
),
7792 /* They should be mergeable. */
7793 ASSERT_TRUE (model0
.can_merge_with_p (model1
, out_merged_model
));
7795 svalue_id merged_svalue_sid
= out_merged_model
->get_rvalue (expr
, &ctxt
);
7796 *out_merged_svalue
= out_merged_model
->get_svalue (merged_svalue_sid
);
7799 /* Verify that we can merge region_model instances. */
7802 test_state_merging ()
7804 tree int_42
= build_int_cst (integer_type_node
, 42);
7805 tree int_113
= build_int_cst (integer_type_node
, 113);
7806 tree x
= build_global_decl ("x", integer_type_node
);
7807 tree y
= build_global_decl ("y", integer_type_node
);
7808 tree z
= build_global_decl ("z", integer_type_node
);
7809 tree p
= build_global_decl ("p", ptr_type_node
);
7811 tree addr_of_y
= build1 (ADDR_EXPR
, ptr_type_node
, y
);
7812 tree addr_of_z
= build1 (ADDR_EXPR
, ptr_type_node
, z
);
7814 auto_vec
<tree
> param_types
;
7815 tree test_fndecl
= make_fndecl (integer_type_node
, "test_fn", param_types
);
7816 allocate_struct_function (test_fndecl
, true);
7819 tree a
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7820 get_identifier ("a"),
7822 tree addr_of_a
= build1 (ADDR_EXPR
, ptr_type_node
, a
);
7824 /* Param "q", a pointer. */
7825 tree q
= build_decl (UNKNOWN_LOCATION
, PARM_DECL
,
7826 get_identifier ("q"),
7830 region_model model0
;
7831 region_model model1
;
7832 region_model merged
;
7833 /* Verify empty models can be merged. */
7834 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
7835 ASSERT_EQ (model0
, merged
);
7838 /* Verify that we can merge two contradictory constraints on the
7839 value for a global. */
7840 /* TODO: verify that the merged model doesn't have a value for
7843 region_model model0
;
7844 region_model model1
;
7845 region_model merged
;
7846 test_region_model_context ctxt
;
7847 model0
.add_constraint (x
, EQ_EXPR
, int_42
, &ctxt
);
7848 model1
.add_constraint (x
, EQ_EXPR
, int_113
, &ctxt
);
7849 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
7850 ASSERT_NE (model0
, merged
);
7851 ASSERT_NE (model1
, merged
);
7854 /* Verify handling of a PARM_DECL. */
7856 test_region_model_context ctxt
;
7857 region_model model0
;
7858 region_model model1
;
7859 ASSERT_EQ (model0
.get_stack_depth (), 0);
7860 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, &ctxt
);
7861 ASSERT_EQ (model0
.get_stack_depth (), 1);
7862 ASSERT_EQ (model0
.get_function_at_depth (0),
7863 DECL_STRUCT_FUNCTION (test_fndecl
));
7864 model1
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, &ctxt
);
7867 = model0
.set_to_new_unknown_value (model0
.get_lvalue (a
, &ctxt
),
7868 integer_type_node
, &ctxt
);
7869 model1
.set_to_new_unknown_value (model1
.get_lvalue (a
, &ctxt
),
7870 integer_type_node
, &ctxt
);
7871 ASSERT_EQ (model0
, model1
);
7873 /* Check that get_value_by_name works for locals. */
7874 ASSERT_EQ (model0
.get_value_by_name ("a"), sid_a
);
7876 /* They should be mergeable, and the result should be the same. */
7877 region_model merged
;
7878 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
7879 ASSERT_EQ (model0
, merged
);
7880 /* In particular, there should be an unknown value for "a". */
7881 svalue
*merged_a_sval
= merged
.get_svalue (merged
.get_rvalue (a
, &ctxt
));
7882 ASSERT_EQ (merged_a_sval
->get_kind (), SK_UNKNOWN
);
7885 /* Verify handling of a global. */
7887 test_region_model_context ctxt
;
7888 region_model model0
;
7889 region_model model1
;
7891 = model0
.set_to_new_unknown_value (model0
.get_lvalue (x
, &ctxt
),
7892 integer_type_node
, &ctxt
);
7893 model1
.set_to_new_unknown_value (model1
.get_lvalue (x
, &ctxt
),
7894 integer_type_node
, &ctxt
);
7895 ASSERT_EQ (model0
, model1
);
7897 /* Check that get_value_by_name works for globals. */
7898 ASSERT_EQ (model0
.get_value_by_name ("x"), sid_x
);
7900 /* They should be mergeable, and the result should be the same. */
7901 region_model merged
;
7902 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
7903 ASSERT_EQ (model0
, merged
);
7904 /* In particular, there should be an unknown value for "x". */
7905 svalue
*merged_x_sval
= merged
.get_svalue (merged
.get_rvalue (x
, &ctxt
));
7906 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
7909 /* Use global-handling to verify various combinations of values. */
7911 /* Two equal constant values. */
7913 region_model merged
;
7914 svalue
*merged_x_sval
;
7915 assert_region_models_merge (x
, int_42
, int_42
, &merged
, &merged_x_sval
);
7917 /* In particular, there should be a constant value for "x". */
7918 ASSERT_EQ (merged_x_sval
->get_kind (), SK_CONSTANT
);
7919 ASSERT_EQ (merged_x_sval
->dyn_cast_constant_svalue ()->get_constant (),
7923 /* Two non-equal constant values. */
7925 region_model merged
;
7926 svalue
*merged_x_sval
;
7927 assert_region_models_merge (x
, int_42
, int_113
, &merged
, &merged_x_sval
);
7929 /* In particular, there should be an unknown value for "x". */
7930 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
7933 /* Uninit and constant. */
7935 region_model merged
;
7936 svalue
*merged_x_sval
;
7937 assert_region_models_merge (x
, NULL_TREE
, int_113
, &merged
, &merged_x_sval
);
7939 /* In particular, there should be an unknown value for "x". */
7940 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
7943 /* Constant and uninit. */
7945 region_model merged
;
7946 svalue
*merged_x_sval
;
7947 assert_region_models_merge (x
, int_42
, NULL_TREE
, &merged
, &merged_x_sval
);
7949 /* In particular, there should be an unknown value for "x". */
7950 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
7953 /* Unknown and constant. */
7956 /* Pointers: NULL and NULL. */
7959 /* Pointers: NULL and non-NULL. */
7962 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7964 region_model model0
;
7965 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
7966 model0
.set_to_new_unknown_value (model0
.get_lvalue (a
, NULL
),
7967 integer_type_node
, NULL
);
7968 model0
.set_value (model0
.get_lvalue (p
, NULL
),
7969 model0
.get_rvalue (addr_of_a
, NULL
), NULL
);
7971 region_model
model1 (model0
);
7972 ASSERT_EQ (model0
, model1
);
7974 /* They should be mergeable, and the result should be the same. */
7975 region_model merged
;
7976 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
7977 ASSERT_EQ (model0
, merged
);
7980 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7982 region_model merged
;
7983 /* p == &y in both input models. */
7984 svalue
*merged_p_sval
;
7985 assert_region_models_merge (p
, addr_of_y
, addr_of_y
, &merged
,
7988 /* We should get p == &y in the merged model. */
7989 ASSERT_EQ (merged_p_sval
->get_kind (), SK_REGION
);
7990 region_svalue
*merged_p_ptr
= merged_p_sval
->dyn_cast_region_svalue ();
7991 region_id merged_p_star_rid
= merged_p_ptr
->get_pointee ();
7992 ASSERT_EQ (merged_p_star_rid
, merged
.get_lvalue (y
, NULL
));
7995 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7997 region_model merged
;
7998 /* x == &y vs x == &z in the input models. */
7999 svalue
*merged_x_sval
;
8000 assert_region_models_merge (x
, addr_of_y
, addr_of_z
, &merged
,
8003 /* We should get x == unknown in the merged model. */
8004 ASSERT_EQ (merged_x_sval
->get_kind (), SK_UNKNOWN
);
8007 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
8009 test_region_model_context ctxt
;
8010 region_model model0
;
8011 region_id new_rid
= model0
.add_new_malloc_region ();
8013 = model0
.get_or_create_ptr_svalue (ptr_type_node
, new_rid
);
8014 model0
.set_value (model0
.get_lvalue (p
, &ctxt
),
8016 model0
.canonicalize (&ctxt
);
8018 region_model
model1 (model0
);
8020 ASSERT_EQ (model0
, model1
);
8022 region_model merged
;
8023 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8025 merged
.canonicalize (&ctxt
);
8027 /* The merged model ought to be identical (after canonicalization,
8029 ASSERT_EQ (model0
, merged
);
8032 /* Two regions sharing the same unknown svalue should continue sharing
8033 an unknown svalue after self-merger. */
8035 test_region_model_context ctxt
;
8036 region_model model0
;
8038 = model0
.set_to_new_unknown_value (model0
.get_lvalue (x
, &ctxt
),
8039 integer_type_node
, &ctxt
);
8040 model0
.set_value (model0
.get_lvalue (y
, &ctxt
), sid
, &ctxt
);
8041 region_model
model1 (model0
);
8043 /* They should be mergeable, and the result should be the same. */
8044 region_model merged
;
8045 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8046 ASSERT_EQ (model0
, merged
);
8048 /* In particular, we should have x == y. */
8049 ASSERT_EQ (merged
.eval_condition (x
, EQ_EXPR
, y
, &ctxt
),
8050 tristate (tristate::TS_TRUE
));
8055 region_model model0
;
8056 region_model model1
;
8057 test_region_model_context ctxt
;
8058 model0
.add_constraint (x
, EQ_EXPR
, int_42
, &ctxt
);
8059 model1
.add_constraint (x
, NE_EXPR
, int_42
, &ctxt
);
8060 ASSERT_TRUE (model0
.can_merge_with_p (model1
));
8064 region_model model0
;
8065 region_model model1
;
8066 test_region_model_context ctxt
;
8067 model0
.add_constraint (x
, EQ_EXPR
, int_42
, &ctxt
);
8068 model1
.add_constraint (x
, NE_EXPR
, int_42
, &ctxt
);
8069 model1
.add_constraint (x
, EQ_EXPR
, int_113
, &ctxt
);
8070 ASSERT_TRUE (model0
.can_merge_with_p (model1
));
8074 // TODO: what can't we merge? need at least one such test
8076 /* TODO: various things
8079 - every combination, but in particular
8085 test_region_model_context ctxt
;
8086 region_model model0
;
8088 region_id x_rid
= model0
.get_lvalue (x
, &ctxt
);
8089 region_id x_as_ptr
= model0
.get_or_create_view (x_rid
, ptr_type_node
);
8090 model0
.set_value (x_as_ptr
, model0
.get_rvalue (addr_of_y
, &ctxt
), &ctxt
);
8092 region_model
model1 (model0
);
8093 ASSERT_EQ (model1
, model0
);
8095 /* They should be mergeable, and the result should be the same. */
8096 region_model merged
;
8097 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8100 /* Verify that we can merge a model in which a local in an older stack
8101 frame points to a local in a more recent stack frame. */
8103 region_model model0
;
8104 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
8105 region_id q_in_first_frame
= model0
.get_lvalue (q
, NULL
);
8107 /* Push a second frame. */
8108 region_id rid_2nd_frame
8109 = model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
8111 /* Have a pointer in the older frame point to a local in the
8112 more recent frame. */
8113 svalue_id sid_ptr
= model0
.get_rvalue (addr_of_a
, NULL
);
8114 model0
.set_value (q_in_first_frame
, sid_ptr
, NULL
);
8116 /* Verify that it's pointing at the newer frame. */
8117 region_id rid_pointee
8118 = model0
.get_svalue (sid_ptr
)->dyn_cast_region_svalue ()->get_pointee ();
8119 ASSERT_EQ (model0
.get_region (rid_pointee
)->get_parent (), rid_2nd_frame
);
8121 model0
.canonicalize (NULL
);
8123 region_model
model1 (model0
);
8124 ASSERT_EQ (model0
, model1
);
8126 /* They should be mergeable, and the result should be the same
8127 (after canonicalization, at least). */
8128 region_model merged
;
8129 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8130 merged
.canonicalize (NULL
);
8131 ASSERT_EQ (model0
, merged
);
8134 /* Verify that we can merge a model in which a local points to a global. */
8136 region_model model0
;
8137 model0
.push_frame (DECL_STRUCT_FUNCTION (test_fndecl
), NULL
, NULL
);
8138 model0
.set_value (model0
.get_lvalue (q
, NULL
),
8139 model0
.get_rvalue (addr_of_y
, NULL
), NULL
);
8141 model0
.canonicalize (NULL
);
8143 region_model
model1 (model0
);
8144 ASSERT_EQ (model0
, model1
);
8146 /* They should be mergeable, and the result should be the same
8147 (after canonicalization, at least). */
8148 region_model merged
;
8149 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8150 merged
.canonicalize (NULL
);
8151 ASSERT_EQ (model0
, merged
);
8155 /* Verify that constraints are correctly merged when merging region_model
8159 test_constraint_merging ()
8161 tree int_0
= build_int_cst (integer_type_node
, 0);
8162 tree int_5
= build_int_cst (integer_type_node
, 5);
8163 tree x
= build_global_decl ("x", integer_type_node
);
8164 tree y
= build_global_decl ("y", integer_type_node
);
8165 tree z
= build_global_decl ("z", integer_type_node
);
8166 tree n
= build_global_decl ("n", integer_type_node
);
8168 test_region_model_context ctxt
;
8170 /* model0: 0 <= (x == y) < n. */
8171 region_model model0
;
8172 model0
.set_to_new_unknown_value (model0
.get_lvalue (x
, &ctxt
),
8173 integer_type_node
, &ctxt
);
8174 model0
.add_constraint (x
, EQ_EXPR
, y
, &ctxt
);
8175 model0
.add_constraint (x
, GE_EXPR
, int_0
, NULL
);
8176 model0
.add_constraint (x
, LT_EXPR
, n
, NULL
);
8178 /* model1: z != 5 && (0 <= x < n). */
8179 region_model model1
;
8180 model1
.set_to_new_unknown_value (model1
.get_lvalue (x
, &ctxt
),
8181 integer_type_node
, &ctxt
);
8182 model1
.add_constraint (z
, NE_EXPR
, int_5
, NULL
);
8183 model1
.add_constraint (x
, GE_EXPR
, int_0
, NULL
);
8184 model1
.add_constraint (x
, LT_EXPR
, n
, NULL
);
8186 /* They should be mergeable; the merged constraints should
8187 be: (0 <= x < n). */
8188 region_model merged
;
8189 ASSERT_TRUE (model0
.can_merge_with_p (model1
, &merged
));
8191 ASSERT_EQ (merged
.eval_condition (x
, GE_EXPR
, int_0
, &ctxt
),
8192 tristate (tristate::TS_TRUE
));
8193 ASSERT_EQ (merged
.eval_condition (x
, LT_EXPR
, n
, &ctxt
),
8194 tristate (tristate::TS_TRUE
));
8196 ASSERT_EQ (merged
.eval_condition (z
, NE_EXPR
, int_5
, &ctxt
),
8197 tristate (tristate::TS_UNKNOWN
));
8198 ASSERT_EQ (merged
.eval_condition (x
, LT_EXPR
, y
, &ctxt
),
8199 tristate (tristate::TS_UNKNOWN
));
8202 /* Run all of the selftests within this file. */
8205 analyzer_region_model_cc_tests ()
8207 test_tree_cmp_on_constants ();
8209 test_unique_constants ();
8210 test_svalue_equality ();
8211 test_region_equality ();
8212 test_purging_by_criteria ();
8213 test_purge_unused_svalues ();
8215 test_stack_frames ();
8216 test_get_representative_path_var ();
8217 test_canonicalization_1 ();
8218 test_canonicalization_2 ();
8219 test_canonicalization_3 ();
8220 test_canonicalization_4 ();
8221 test_state_merging ();
8222 test_constraint_merging ();
8225 } // namespace selftest
8227 #endif /* CHECKING_P */
8231 #endif /* #if ENABLE_ANALYZER */