1/* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
29#include "graphviz.h"
30#include "options.h"
31#include "cgraph.h"
32#include "tree-dfa.h"
33#include "stringpool.h"
34#include "convert.h"
35#include "target.h"
36#include "fold-const.h"
37#include "tree-pretty-print.h"
38#include "diagnostic-color.h"
39#include "diagnostic-metadata.h"
40#include "diagnostic-core.h"
41#include "tristate.h"
42#include "bitmap.h"
43#include "selftest.h"
44#include "function.h"
45#include "analyzer/analyzer.h"
46#include "analyzer/analyzer-logging.h"
47#include "ordered-hash-map.h"
48#include "options.h"
49#include "cgraph.h"
50#include "cfg.h"
51#include "digraph.h"
52#include "analyzer/supergraph.h"
53#include "sbitmap.h"
54#include "analyzer/region-model.h"
55#include "analyzer/constraint-manager.h"
56#include "diagnostic-event-id.h"
57#include "analyzer/sm.h"
58#include "diagnostic-event-id.h"
59#include "analyzer/sm.h"
60#include "analyzer/pending-diagnostic.h"
61#include "analyzer/analyzer-selftests.h"
62
63#if ENABLE_ANALYZER
64
65/* Dump T to PP in language-independent form, for debugging/logging/dumping
66 purposes. */
67
68static void
69dump_tree (pretty_printer *pp, tree t)
70{
71 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
72}
73
74/* Dump this path_var to PP (which must support %E for trees).
75
76 Express the stack depth using an "@DEPTH" suffix, so e.g. given
77 void foo (int j);
78 void bar (int i)
79 {
80 foo (i);
81 }
82 then:
83 - the "i" in "bar" would be "(i @ 0)"
84 - the "j" in "foo" would be "(j @ 1)". */
85
86void
87path_var::dump (pretty_printer *pp) const
88{
89PUSH_IGNORE_WFORMAT
90 if (m_tree == NULL_TREE)
91 pp_string (pp, "NULL");
 92 else if (CONSTANT_CLASS_P (m_tree))
93 pp_printf (pp, "%qE", m_tree);
94 else
95 pp_printf (pp, "(%qE @ %i)", m_tree, m_stack_depth);
96POP_IGNORE_WFORMAT
97}
98
99/* For use in printing a comma-separated list. */
100
101static void
102dump_separator (pretty_printer *pp, bool *is_first)
103{
104 if (!*is_first)
105 pp_string (pp, ", ");
106 *is_first = false;
107}
108
109/* Concrete subclass of constraint_manager that wires it up to a region_model
110 (whilst allowing the constraint_manager and region_model to be somewhat
 111 at arm's length).
112 TODO: revisit this; maybe put the region_model * into the constraint_manager
113 base class. */
114
115class impl_constraint_manager : public constraint_manager
116{
117 public:
118 impl_constraint_manager (region_model *model)
119 : constraint_manager (),
120 m_model (model)
121 {}
122
123 impl_constraint_manager (const impl_constraint_manager &other,
124 region_model *model)
125 : constraint_manager (other),
126 m_model (model)
127 {}
128
129 constraint_manager *clone (region_model *model) const
130 {
131 return new impl_constraint_manager (*this, model);
132 }
133
134 tree maybe_get_constant (svalue_id sid) const FINAL OVERRIDE
135 {
136 svalue *svalue = m_model->get_svalue (sid);
137 return svalue->maybe_get_constant ();
138 }
139
140 svalue_id get_sid_for_constant (tree cst) const FINAL OVERRIDE
141 {
142 gcc_assert (CONSTANT_CLASS_P (cst));
143 return m_model->get_rvalue (cst, NULL);
144 }
145
146 int get_num_svalues () const FINAL OVERRIDE
147 {
148 return m_model->get_num_svalues ();
149 }
150
151 private:
152 region_model *m_model;
153};
154
155/* class svalue_id. */
156
157/* Print this svalue_id to PP. */
158
159void
160svalue_id::print (pretty_printer *pp) const
161{
162 if (null_p ())
163 pp_printf (pp, "null");
164 else
165 pp_printf (pp, "sv%i", m_idx);
166}
167
168/* Print this svalue_id in .dot format to PP. */
169
170void
171svalue_id::dump_node_name_to_pp (pretty_printer *pp) const
172{
173 gcc_assert (!null_p ());
174 pp_printf (pp, "svalue_%i", m_idx);
175}
176
177/* Assert that this object is valid (w.r.t. MODEL). */
178
179void
180svalue_id::validate (const region_model &model) const
181{
182 gcc_assert (null_p () || m_idx < (int)model.get_num_svalues ());
183}
184
185/* class region_id. */
186
187/* Print this region_id to PP. */
188
189void
190region_id::print (pretty_printer *pp) const
191{
192 if (null_p ())
193 pp_printf (pp, "null");
194 else
195 pp_printf (pp, "r%i", m_idx);
196}
197
198/* Print this region_id in .dot format to PP. */
199
200void
201region_id::dump_node_name_to_pp (pretty_printer *pp) const
202{
203 gcc_assert (!null_p ());
204 pp_printf (pp, "region_%i", m_idx);
205}
206
207/* Assert that this object is valid (w.r.t. MODEL). */
208
209void
210region_id::validate (const region_model &model) const
211{
212 gcc_assert (null_p () || m_idx < (int)model.get_num_regions ());
213}
214
215/* class id_set. */
216
217/* id_set<region_id>'s ctor. */
218
219template<>
220id_set<region_id>::id_set (const region_model *model)
221: m_bitmap (model->get_num_regions ())
222{
223 bitmap_clear (m_bitmap);
224}
225
226/* class svalue and its various subclasses. */
227
228/* class svalue. */
229
230/* svalue's equality operator. Most of the work is done by
231 a "compare_fields" implementation on each subclass. */
232
233bool
234svalue::operator== (const svalue &other) const
235{
236 enum svalue_kind this_kind = get_kind ();
237 enum svalue_kind other_kind = other.get_kind ();
238 if (this_kind != other_kind)
239 return false;
240
241 if (m_type != other.m_type)
242 return false;
243
244 switch (this_kind)
245 {
246 default:
247 gcc_unreachable ();
248 case SK_REGION:
249 {
250 const region_svalue &this_sub
251 = (const region_svalue &)*this;
252 const region_svalue &other_sub
253 = (const region_svalue &)other;
254 return this_sub.compare_fields (other_sub);
255 }
256 break;
257 case SK_CONSTANT:
258 {
259 const constant_svalue &this_sub
260 = (const constant_svalue &)*this;
261 const constant_svalue &other_sub
262 = (const constant_svalue &)other;
263 return this_sub.compare_fields (other_sub);
264 }
265 break;
266 case SK_UNKNOWN:
267 {
268 const unknown_svalue &this_sub
269 = (const unknown_svalue &)*this;
270 const unknown_svalue &other_sub
271 = (const unknown_svalue &)other;
272 return this_sub.compare_fields (other_sub);
273 }
274 break;
275 case SK_POISONED:
276 {
277 const poisoned_svalue &this_sub
278 = (const poisoned_svalue &)*this;
279 const poisoned_svalue &other_sub
280 = (const poisoned_svalue &)other;
281 return this_sub.compare_fields (other_sub);
282 }
283 break;
284 case SK_SETJMP:
285 {
286 const setjmp_svalue &this_sub
287 = (const setjmp_svalue &)*this;
288 const setjmp_svalue &other_sub
289 = (const setjmp_svalue &)other;
290 return this_sub.compare_fields (other_sub);
291 }
292 break;
293 }
294}
295
296/* Generate a hash value for this svalue. Most of the work is done by the
297 add_to_hash vfunc. */
298
299hashval_t
300svalue::hash () const
301{
302 inchash::hash hstate;
303 if (m_type)
304 hstate.add_int (TYPE_UID (m_type));
305 add_to_hash (hstate);
306 return hstate.end ();
307}
308
309/* Print this svalue and its ID to PP. */
310
311void
312svalue::print (const region_model &model,
313 svalue_id this_sid,
314 pretty_printer *pp) const
315{
316 this_sid.print (pp);
317 pp_string (pp, ": {");
318
319PUSH_IGNORE_WFORMAT
320 if (m_type)
321 {
322 gcc_assert (TYPE_P (m_type));
323 pp_printf (pp, "type: %qT, ", m_type);
324 }
325POP_IGNORE_WFORMAT
326
327 /* vfunc. */
328 print_details (model, this_sid, pp);
329
330 pp_string (pp, "}");
331}
332
333/* Dump this svalue in the form of a .dot record to PP. */
334
335void
336svalue::dump_dot_to_pp (const region_model &model,
337 svalue_id this_sid,
338 pretty_printer *pp) const
339{
340 this_sid.dump_node_name_to_pp (pp);
341 pp_printf (pp, " [label=\"");
342 pp_write_text_to_stream (pp);
343 this_sid.print (pp);
344 pp_string (pp, ": {");
345 print (model, this_sid, pp);
346 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
347 pp_string (pp, "}\"];");
348 pp_newline (pp);
349}
350
351/* Base implementation of svalue::remap_region_ids vfunc. */
352
353void
354svalue::remap_region_ids (const region_id_map &)
355{
356 /* Empty. */
357}
358
359/* Base implementation of svalue::walk_for_canonicalization vfunc. */
360
361void
362svalue::walk_for_canonicalization (canonicalization *) const
363{
364 /* Empty. */
365}
366
367/* Base implementation of svalue::get_child_sid vfunc. */
368
369svalue_id
370svalue::get_child_sid (region *parent ATTRIBUTE_UNUSED,
371 region *child,
372 region_model &model,
373 region_model_context *ctxt ATTRIBUTE_UNUSED)
374{
375 svalue *new_child_value = clone ();
376 if (child->get_type ())
377 new_child_value->m_type = child->get_type ();
378 svalue_id new_child_sid = model.add_svalue (new_child_value);
379 return new_child_sid;
380}
381
382/* If this svalue is a constant_svalue, return the underlying tree constant.
383 Otherwise return NULL_TREE. */
384
385tree
386svalue::maybe_get_constant () const
387{
388 if (const constant_svalue *cst_sval = dyn_cast_constant_svalue ())
389 return cst_sval->get_constant ();
390 else
391 return NULL_TREE;
392}
393
394/* class region_svalue : public svalue. */
395
396/* Compare the fields of this region_svalue with OTHER, returning true
397 if they are equal.
398 For use by svalue::operator==. */
399
400bool
401region_svalue::compare_fields (const region_svalue &other) const
402{
403 return m_rid == other.m_rid;
404}
405
406/* Implementation of svalue::add_to_hash vfunc for region_svalue. */
407
408void
409region_svalue::add_to_hash (inchash::hash &hstate) const
410{
411 inchash::add (m_rid, hstate);
412}
413
414/* Implementation of svalue::print_details vfunc for region_svalue. */
415
416void
417region_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
418 svalue_id this_sid ATTRIBUTE_UNUSED,
419 pretty_printer *pp) const
420{
421 if (m_rid.null_p ())
422 pp_string (pp, "NULL");
423 else
424 {
425 pp_string (pp, "&");
426 m_rid.print (pp);
427 }
428}
429
430/* Implementation of svalue::dump_dot_to_pp for region_svalue. */
431
432void
433region_svalue::dump_dot_to_pp (const region_model &model,
434 svalue_id this_sid,
435 pretty_printer *pp) const
436{
437 svalue::dump_dot_to_pp (model, this_sid, pp);
438
439 /* If non-NULL, add an edge to the pointed-to region. */
440 if (!m_rid.null_p ())
441 {
442 this_sid.dump_node_name_to_pp (pp);
443 pp_string (pp, " -> ");
444 m_rid.dump_node_name_to_pp (pp);
445 pp_string (pp, ";");
446 pp_newline (pp);
447 }
448}
449
450/* Implementation of svalue::remap_region_ids vfunc for region_svalue. */
451
452void
453region_svalue::remap_region_ids (const region_id_map &map)
454{
455 map.update (&m_rid);
456}
457
458/* Merge REGION_SVAL_A and REGION_SVAL_B using MERGER, writing the result
459 into *MERGED_SID. */
460
461void
462region_svalue::merge_values (const region_svalue &region_sval_a,
463 const region_svalue &region_sval_b,
464 svalue_id *merged_sid,
465 tree type,
466 model_merger *merger)
467{
468 region_id a_rid = region_sval_a.get_pointee ();
469 region_id b_rid = region_sval_b.get_pointee ();
470
471 /* Both are non-NULL. */
472 gcc_assert (!a_rid.null_p () && !b_rid.null_p ());
473
474 /* Have these ptr-values already been merged? */
475
476 region_id a_rid_in_m
477 = merger->m_map_regions_from_a_to_m.get_dst_for_src (a_rid);
478 region_id b_rid_in_m
479 = merger->m_map_regions_from_b_to_m.get_dst_for_src (b_rid);
480
481 /* "null_p" here means "we haven't seen this ptr-value before".
482 If we've seen one but not the other, or we have different
483 regions, then the merged ptr has to be "unknown". */
484 if (a_rid_in_m != b_rid_in_m)
485 {
486 svalue *merged_sval = new unknown_svalue (type);
487 *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
488 return;
489 }
490
491 /* Have we seen this yet? If so, reuse the value. */
492 if (!a_rid_in_m.null_p ())
493 {
494 *merged_sid
495 = merger->m_merged_model->get_or_create_ptr_svalue (type, a_rid_in_m);
496 return;
497 }
498
499 /* Otherwise we have A/B regions that haven't been referenced yet. */
500
 501 /* Are the regions the "same", when seen from the tree point-of-view?
502 If so, create a merged pointer to it. */
503 path_var pv_a = merger->m_model_a->get_representative_path_var (a_rid);
504 path_var pv_b = merger->m_model_b->get_representative_path_var (b_rid);
505 if (pv_a.m_tree
506 && pv_a == pv_b)
507 {
508 region_id merged_pointee_rid
509 = merger->m_merged_model->get_lvalue (pv_a, NULL);
510 *merged_sid
511 = merger->m_merged_model->get_or_create_ptr_svalue (type,
512 merged_pointee_rid);
513 merger->record_regions (a_rid, b_rid, merged_pointee_rid);
514 return;
515 }
516
517 /* Handle an A/B pair of ptrs that both point at heap regions.
518 If they both have a heap region in the merger model, merge them. */
519 region *region_a = merger->m_model_a->get_region (a_rid);
520 region *region_b = merger->m_model_b->get_region (b_rid);
521 region_id a_parent_rid = region_a->get_parent ();
522 region_id b_parent_rid = region_b->get_parent ();
523 region *parent_region_a = merger->m_model_a->get_region (a_parent_rid);
524 region *parent_region_b = merger->m_model_b->get_region (b_parent_rid);
525 if (parent_region_a
526 && parent_region_b
527 && parent_region_a->get_kind () == RK_HEAP
528 && parent_region_b->get_kind () == RK_HEAP)
529 {
530 /* We have an A/B pair of ptrs that both point at heap regions. */
 531 /* Presumably we want to see if each A/B heap region already
 532 has a merged region and, if so, whether it is the same one;
 533 that check is done above. */
534
535 region_id merged_pointee_rid
536 = merger->m_merged_model->add_new_malloc_region ();
537 *merged_sid
538 = merger->m_merged_model->get_or_create_ptr_svalue
539 (type, merged_pointee_rid);
540 merger->record_regions (a_rid, b_rid, merged_pointee_rid);
541 return;
542 }
543
544 /* Two different non-NULL pointers? Merge to unknown. */
545 svalue *merged_sval = new unknown_svalue (type);
546 *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
547 return;
548}
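/* As a rough sketch of the cases above (the precise behavior also depends
   on the caller driving the model_merger), consider analyzed code such as:

       void *p;
       if (flag)
         p = malloc (16);
       else
         p = malloc (16);

   At the join point, both models have "p" pointing at a heap-allocated
   region, so the merged model gets a single new heap region (via
   add_new_malloc_region) and "p" points at that; whereas for

       p = flag ? (void *)&a : (void *)&b;

   the two pointees have different representative path_vars and are not
   heap regions, so the merged "p" becomes an unknown_svalue.  */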
549
550/* Implementation of svalue::walk_for_canonicalization vfunc for
551 region_svalue. */
552
553void
554region_svalue::walk_for_canonicalization (canonicalization *c) const
555{
556 c->walk_rid (m_rid);
557}
558
559/* Evaluate the condition LHS OP RHS.
560 Subroutine of region_model::eval_condition for when we have a pair of
561 pointers. */
562
563tristate
564region_svalue::eval_condition (region_svalue *lhs,
565 enum tree_code op,
566 region_svalue *rhs)
567{
568 /* See if they point to the same region. */
569 /* TODO: what about child regions where the child is the first child
570 (or descendent)? */
571 region_id lhs_rid = lhs->get_pointee ();
572 region_id rhs_rid = rhs->get_pointee ();
573 switch (op)
574 {
575 default:
576 gcc_unreachable ();
577
578 case EQ_EXPR:
579 if (lhs_rid == rhs_rid)
580 return tristate::TS_TRUE;
581 else
582 return tristate::TS_FALSE;
583 break;
584
585 case NE_EXPR:
586 if (lhs_rid != rhs_rid)
587 return tristate::TS_TRUE;
588 else
589 return tristate::TS_FALSE;
590 break;
591
592 case GE_EXPR:
593 case LE_EXPR:
594 if (lhs_rid == rhs_rid)
595 return tristate::TS_TRUE;
596 break;
597
598 case GT_EXPR:
599 case LT_EXPR:
600 if (lhs_rid == rhs_rid)
601 return tristate::TS_FALSE;
602 break;
603 }
604
605 return tristate::TS_UNKNOWN;
606}
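/* For example, given analyzed code with locals "int a, b;", a condition
   such as "&a == &a" compares two region_svalues with the same region_id
   and so folds to TS_TRUE, "&a == &b" folds to TS_FALSE, and "&a < &b"
   stays TS_UNKNOWN, since no ordering is imposed between distinct
   regions (only the equal-region cases of GE/LE/GT/LT are decided).  */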
607
608/* class constant_svalue : public svalue. */
609
610/* Compare the fields of this constant_svalue with OTHER, returning true
611 if they are equal.
612 For use by svalue::operator==. */
613
614bool
615constant_svalue::compare_fields (const constant_svalue &other) const
616{
617 return m_cst_expr == other.m_cst_expr;
618}
619
620/* Implementation of svalue::add_to_hash vfunc for constant_svalue. */
621
622void
623constant_svalue::add_to_hash (inchash::hash &hstate) const
624{
625 inchash::add_expr (m_cst_expr, hstate);
626}
627
628/* Merge the CST_SVAL_A and CST_SVAL_B using MERGER, writing the id of
629 the resulting svalue into *MERGED_SID. */
630
631void
632constant_svalue::merge_values (const constant_svalue &cst_sval_a,
633 const constant_svalue &cst_sval_b,
634 svalue_id *merged_sid,
635 model_merger *merger)
636{
637 tree cst_a = cst_sval_a.get_constant ();
638 tree cst_b = cst_sval_b.get_constant ();
639 svalue *merged_sval;
640 if (cst_a == cst_b)
641 {
642 /* If they are the same constant, merge as that constant value. */
643 merged_sval = new constant_svalue (cst_a);
644 }
645 else
646 {
647 /* Otherwise, we have two different constant values.
648 Merge as an unknown value.
649 TODO: impose constraints on the value?
650 (maybe just based on A, to avoid infinite chains) */
651 merged_sval = new unknown_svalue (TREE_TYPE (cst_a));
652 }
653 *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
654}
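/* Informally: if both input models have "i == 42", the merged model keeps
   the constant 42; if one has "i == 42" and the other "i == 43", the
   merged value for "i" becomes a fresh unknown_svalue whose type is taken
   from the first constant (TREE_TYPE (cst_a)), dropping the per-path
   constants.  */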
655
656/* Evaluate the condition LHS OP RHS.
657 Subroutine of region_model::eval_condition for when we have a pair of
658 constants. */
659
660tristate
661constant_svalue::eval_condition (constant_svalue *lhs,
662 enum tree_code op,
663 constant_svalue *rhs)
664{
665 tree lhs_const = lhs->get_constant ();
666 tree rhs_const = rhs->get_constant ();
667
668 gcc_assert (CONSTANT_CLASS_P (lhs_const));
669 gcc_assert (CONSTANT_CLASS_P (rhs_const));
670
671 tree comparison
672 = fold_build2 (op, boolean_type_node, lhs_const, rhs_const);
673 if (comparison == boolean_true_node)
674 return tristate (tristate::TS_TRUE);
675 if (comparison == boolean_false_node)
676 return tristate (tristate::TS_FALSE);
677 return tristate::TS_UNKNOWN;
678}
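/* A sketch: with lhs_const == 3 and rhs_const == 5 (both INTEGER_CST),
   fold_build2 (LT_EXPR, boolean_type_node, 3, 5) folds to
   boolean_true_node, so "3 < 5" yields TS_TRUE; if folding does not
   reduce the comparison to a boolean constant, the result is
   TS_UNKNOWN.  */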
679
680/* Implementation of svalue::print_details vfunc for constant_svalue. */
681
682void
683constant_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
684 svalue_id this_sid ATTRIBUTE_UNUSED,
685 pretty_printer *pp) const
686{
687PUSH_IGNORE_WFORMAT
688 pp_printf (pp, "%qE", m_cst_expr);
689POP_IGNORE_WFORMAT
690}
691
692/* Implementation of svalue::get_child_sid vfunc for constant_svalue. */
693
694svalue_id
695constant_svalue::get_child_sid (region *parent ATTRIBUTE_UNUSED,
696 region *child,
697 region_model &model,
698 region_model_context *ctxt ATTRIBUTE_UNUSED)
699{
700 /* TODO: handle the all-zeroes case by returning an all-zeroes of the
701 child type. */
702
703 /* Otherwise, we don't have a good way to get a child value out of a
704 constant.
705
706 Handle this case by using an unknown value. */
707 svalue *unknown_sval = new unknown_svalue (child->get_type ());
708 return model.add_svalue (unknown_sval);
709}
710
711/* class unknown_svalue : public svalue. */
712
713/* Compare the fields of this unknown_svalue with OTHER, returning true
714 if they are equal.
715 For use by svalue::operator==. */
716
717bool
718unknown_svalue::compare_fields (const unknown_svalue &) const
719{
720 /* I *think* we want to return true here, in that when comparing
721 two region models, we want two peer unknown_svalue instances
722 to be the "same". */
723 return true;
724}
725
726/* Implementation of svalue::add_to_hash vfunc for unknown_svalue. */
727
728void
729unknown_svalue::add_to_hash (inchash::hash &) const
730{
731 /* Empty. */
732}
733
734/* Implementation of svalue::print_details vfunc for unknown_svalue. */
735
736void
737unknown_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
738 svalue_id this_sid ATTRIBUTE_UNUSED,
739 pretty_printer *pp) const
740{
741 pp_string (pp, "unknown");
742}
743
744/* Get a string for KIND for use in debug dumps. */
745
746const char *
747poison_kind_to_str (enum poison_kind kind)
748{
749 switch (kind)
750 {
751 default:
752 gcc_unreachable ();
753 case POISON_KIND_UNINIT:
754 return "uninit";
755 case POISON_KIND_FREED:
756 return "freed";
757 case POISON_KIND_POPPED_STACK:
758 return "popped stack";
759 }
760}
761
762/* class poisoned_svalue : public svalue. */
763
764/* Compare the fields of this poisoned_svalue with OTHER, returning true
765 if they are equal.
766 For use by svalue::operator==. */
767
768bool
769poisoned_svalue::compare_fields (const poisoned_svalue &other) const
770{
771 return m_kind == other.m_kind;
772}
773
774/* Implementation of svalue::add_to_hash vfunc for poisoned_svalue. */
775
776void
777poisoned_svalue::add_to_hash (inchash::hash &hstate) const
778{
779 hstate.add_int (m_kind);
780}
781
782/* Implementation of svalue::print_details vfunc for poisoned_svalue. */
783
784void
785poisoned_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
786 svalue_id this_sid ATTRIBUTE_UNUSED,
787 pretty_printer *pp) const
788{
789 pp_printf (pp, "poisoned: %s", poison_kind_to_str (m_kind));
790}
791
792/* class setjmp_svalue's implementation is in engine.cc, so that it can use
793 the declaration of exploded_node. */
794
795/* class region and its various subclasses. */
796
797/* Get a string for KIND for use in debug dumps. */
798
799const char *
800region_kind_to_str (enum region_kind kind)
801{
802 switch (kind)
803 {
804 default:
805 gcc_unreachable ();
806 case RK_PRIMITIVE:
807 return "primitive";
808 case RK_STRUCT:
809 return "struct";
810 case RK_UNION:
811 return "union";
812 case RK_ARRAY:
813 return "array";
814 case RK_FRAME:
815 return "frame";
816 case RK_GLOBALS:
817 return "globals";
818 case RK_CODE:
819 return "code";
820 case RK_FUNCTION:
821 return "function";
822 case RK_STACK:
823 return "stack";
824 case RK_HEAP:
825 return "heap";
826 case RK_ROOT:
827 return "root";
828 case RK_SYMBOLIC:
829 return "symbolic";
830 }
831}
832
833/* class region. */
834
835/* Equality operator for region.
836 After comparing base class fields and kind, the rest of the
837 comparison is handled off to a "compare_fields" member function
838 specific to the appropriate subclass. */
839
840bool
841region::operator== (const region &other) const
842{
843 if (m_parent_rid != other.m_parent_rid)
844 return false;
845 if (m_sval_id != other.m_sval_id)
846 return false;
847 if (m_type != other.m_type)
848 return false;
849
850 enum region_kind this_kind = get_kind ();
851 enum region_kind other_kind = other.get_kind ();
852 if (this_kind != other_kind)
853 return false;
854
855 /* Compare views. */
856 if (m_view_rids.length () != other.m_view_rids.length ())
857 return false;
858 int i;
859 region_id *rid;
860 FOR_EACH_VEC_ELT (m_view_rids, i, rid)
861 if (! (*rid == other.m_view_rids[i]))
862 return false;
863
864 switch (this_kind)
865 {
866 default:
867 gcc_unreachable ();
868 case RK_PRIMITIVE:
869 {
870#if 1
871 return true;
872#else
873 const primitive_region &this_sub
874 = (const primitive_region &)*this;
875 const primitive_region &other_sub
876 = (const primitive_region &)other;
877 return this_sub.compare_fields (other_sub);
878#endif
879 }
880 case RK_STRUCT:
881 {
882 const struct_region &this_sub
883 = (const struct_region &)*this;
884 const struct_region &other_sub
885 = (const struct_region &)other;
886 return this_sub.compare_fields (other_sub);
887 }
888 case RK_UNION:
889 {
890 const union_region &this_sub
891 = (const union_region &)*this;
892 const union_region &other_sub
893 = (const union_region &)other;
894 return this_sub.compare_fields (other_sub);
895 }
896 case RK_ARRAY:
897 {
898 const array_region &this_sub
899 = (const array_region &)*this;
900 const array_region &other_sub
901 = (const array_region &)other;
902 return this_sub.compare_fields (other_sub);
903 }
904 case RK_FRAME:
905 {
906 const frame_region &this_sub
907 = (const frame_region &)*this;
908 const frame_region &other_sub
909 = (const frame_region &)other;
910 return this_sub.compare_fields (other_sub);
911 }
912 case RK_GLOBALS:
913 {
914 const globals_region &this_sub
915 = (const globals_region &)*this;
916 const globals_region &other_sub
917 = (const globals_region &)other;
918 return this_sub.compare_fields (other_sub);
919 }
920 case RK_CODE:
921 {
922 const code_region &this_sub
923 = (const code_region &)*this;
924 const code_region &other_sub
925 = (const code_region &)other;
926 return this_sub.compare_fields (other_sub);
927 }
928 case RK_FUNCTION:
929 {
930 const function_region &this_sub
931 = (const function_region &)*this;
932 const function_region &other_sub
933 = (const function_region &)other;
934 return this_sub.compare_fields (other_sub);
935 }
936 case RK_STACK:
937 {
938 const stack_region &this_sub
939 = (const stack_region &)*this;
940 const stack_region &other_sub
941 = (const stack_region &)other;
942 return this_sub.compare_fields (other_sub);
943 }
944 case RK_ROOT:
945 {
946 const root_region &this_sub
947 = (const root_region &)*this;
948 const root_region &other_sub
949 = (const root_region &)other;
950 return this_sub.compare_fields (other_sub);
951 }
952 case RK_SYMBOLIC:
953 {
954 const symbolic_region &this_sub
955 = (const symbolic_region &)*this;
956 const symbolic_region &other_sub
957 = (const symbolic_region &)other;
958 return this_sub.compare_fields (other_sub);
959 }
960 case RK_HEAP:
961 {
962 const heap_region &this_sub
963 = (const heap_region &)*this;
964 const heap_region &other_sub
965 = (const heap_region &)other;
966 return this_sub.compare_fields (other_sub);
967 }
968 }
969}
970
971/* Get the parent region of this region. */
972
973region *
974region::get_parent_region (const region_model &model) const
975{
976 return model.get_region (m_parent_rid);
977}
978
979/* Set this region's value to RHS_SID (or potentially a variant of it,
980 for some kinds of casts). */
981
982void
983region::set_value (region_model &model, region_id this_rid, svalue_id rhs_sid,
984 region_model_context *ctxt)
985{
986 /* Handle some kinds of casting. */
987 if (m_type)
988 {
989 svalue *sval = model.get_svalue (rhs_sid);
990 if (sval->get_type ())
991 rhs_sid = model.maybe_cast (m_type, rhs_sid, ctxt);
992
993 sval = model.get_svalue (rhs_sid);
994 if (sval->get_type ())
995 gcc_assert (m_type == sval->get_type ());
996 }
997
998 m_sval_id = rhs_sid;
999
1000 /* Update views.
1001 If this is a view, it becomes its parent's active view.
 1002 If there was already an active view, invalidate its value; otherwise
1003 if the parent itself had a value, invalidate it.
1004 If it's not a view, then deactivate any view that is active on this
1005 region. */
1006 {
1007 if (m_is_view)
1008 become_active_view (model, this_rid);
1009 else
1010 {
1011 deactivate_any_active_view (model);
1012 gcc_assert (m_active_view_rid.null_p ());
1013 }
1014 }
1015}
1016
1017/* Make this region (with id THIS_RID) the "active" view of its parent.
1018 Any other active view has its value set to "unknown" and descendent values
1019 cleared.
1020 If there wasn't an active view, then set the parent's value to unknown, and
1021 clear its descendent values (apart from this view). */
1022
1023void
1024region::become_active_view (region_model &model, region_id this_rid)
1025{
1026 gcc_assert (m_is_view);
1027
1028 region *parent_reg = model.get_region (m_parent_rid);
1029 gcc_assert (parent_reg);
1030
1031 region_id old_active_view_rid = parent_reg->m_active_view_rid;
1032
1033 if (old_active_view_rid == this_rid)
1034 {
1035 /* Already the active view: do nothing. */
1036 return;
1037 }
1038
1039 /* We have a change of active view. */
1040 parent_reg->m_active_view_rid = this_rid;
1041
1042 if (old_active_view_rid.null_p ())
1043 {
1044 /* No previous active view, but the parent and its other children
1045 might have values.
1046 If so, invalidate those values - but not that of the new view. */
1047 region_id_set below_region (&model);
1048 model.get_descendents (m_parent_rid, &below_region, this_rid);
1049 for (unsigned i = 0; i < model.get_num_regions (); i++)
1050 {
1051 region_id rid (region_id::from_int (i));
1052 if (below_region.region_p (rid))
1053 {
1054 region *other_reg = model.get_region (rid);
1055 other_reg->m_sval_id = svalue_id::null ();
1056 }
1057 }
1058 region *parent = model.get_region (m_parent_rid);
1059 parent->m_sval_id
1060 = model.add_svalue (new unknown_svalue (parent->get_type ()));
1061 }
1062 else
1063 {
1064 /* If there was an active view, invalidate it. */
1065 region *old_active_view = model.get_region (old_active_view_rid);
1066 old_active_view->deactivate_view (model, old_active_view_rid);
1067 }
1068}
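/* A sketch of the effect on analyzed code, assuming that an access made
   through a differently-typed pointer is modeled as a view of the
   underlying region (compare the "(void *)" note in region::add_view
   below):

       int i = 42;
       *(short *)&i = 1;

   The write makes the "short" view the active view of i's region; i's
   previous value (42) and any other descendent values are invalidated
   to "unknown" rather than being partially updated.  */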
1069
1070/* If this region (with id THIS_RID) has an active view, deactivate it,
1071 clearing m_active_view_rid. */
1072
1073void
1074region::deactivate_any_active_view (region_model &model)
1075{
1076 if (m_active_view_rid.null_p ())
1077 return;
1078 region *view = model.get_region (m_active_view_rid);
1079 view->deactivate_view (model, m_active_view_rid);
1080 m_active_view_rid = region_id::null ();
1081}
1082
1083/* Clear any values for regions below THIS_RID.
1084 Set the view's value to unknown. */
1085
1086void
1087region::deactivate_view (region_model &model, region_id this_view_rid)
1088{
1089 gcc_assert (is_view_p ());
1090
 1091 /* Purge values from THIS_VIEW_RID and all its
1092 descendents. Potentially we could use a poison value
1093 for this, but let's use unknown for now. */
1094 region_id_set below_view (&model);
1095 model.get_descendents (this_view_rid, &below_view, region_id::null ());
1096
1097 for (unsigned i = 0; i < model.get_num_regions (); i++)
1098 {
1099 region_id rid (region_id::from_int (i));
1100 if (below_view.region_p (rid))
1101 {
1102 region *other_reg = model.get_region (rid);
1103 other_reg->m_sval_id = svalue_id::null ();
1104 }
1105 }
1106
1107 m_sval_id = model.add_svalue (new unknown_svalue (get_type ()));
1108}
1109
1110/* Get a value for this region, either its value if it has one,
1111 or, failing that, "inherit" a value from first ancestor with a
1112 non-null value.
1113
1114 For example, when getting the value for a local variable within
1115 a stack frame that doesn't have one, the frame doesn't have a value
1116 either, but the stack as a whole will have an "uninitialized" poison
1117 value, so inherit that. */
1118
1119svalue_id
1120region::get_value (region_model &model, bool non_null,
1121 region_model_context *ctxt)
1122{
1123 /* If this region has a value, use it. */
1124 if (!m_sval_id.null_p ())
1125 return m_sval_id;
1126
1127 /* Otherwise, "inherit" value from first ancestor with a
1128 non-null value. */
1129
1130 region *parent = model.get_region (m_parent_rid);
1131 if (parent)
1132 {
1133 svalue_id inherited_sid
1134 = parent->get_inherited_child_sid (this, model, ctxt);
1135 if (!inherited_sid.null_p ())
1136 return inherited_sid;
1137 }
1138
1139 /* If a non-null value has been requested, then generate
1140 a new unknown value. Store it, so that repeated reads from this
1141 region will yield the same unknown value. */
1142 if (non_null)
1143 {
1144 svalue_id unknown_sid = model.add_svalue (new unknown_svalue (m_type));
1145 m_sval_id = unknown_sid;
1146 return unknown_sid;
1147 }
1148
1149 return svalue_id::null ();
1150}
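/* A sketch of the inheritance described above, for analyzed code such as:

       void test (void)
       {
         int i;
         int j = i;
       }

   Nothing has been bound to i's region, and the frame has no value
   either, so reading "i" inherits the stack's "uninitialized" poisoned
   value (cloned onto i's region by get_inherited_child_sid).  */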
1151
1152/* Get a value for CHILD, inheriting from this region.
1153
1154 Recurse, so this region will inherit a value if it doesn't already
1155 have one. */
1156
1157svalue_id
1158region::get_inherited_child_sid (region *child,
1159 region_model &model,
1160 region_model_context *ctxt)
1161{
1162 if (m_sval_id.null_p ())
1163 {
1164 /* Recurse. */
1165 if (!m_parent_rid.null_p ())
1166 {
1167 region *parent = model.get_region (m_parent_rid);
1168 m_sval_id = parent->get_inherited_child_sid (this, model, ctxt);
1169 }
1170 }
1171
1172 if (!m_sval_id.null_p ())
1173 {
1174 /* Clone the parent's value, so that attempts to update it
 1175 (e.g. giving a specific value to an inherited "uninitialized"
1176 value) touch the child, and not the parent. */
1177 svalue *this_value = model.get_svalue (m_sval_id);
1178 svalue_id new_child_sid
1179 = this_value->get_child_sid (this, child, model, ctxt);
1180 if (ctxt)
1181 ctxt->on_inherited_svalue (m_sval_id, new_child_sid);
1182 child->m_sval_id = new_child_sid;
1183 return new_child_sid;
1184 }
1185
1186 return svalue_id::null ();
1187}
1188
1189/* Generate a hash value for this region. The work is done by the
1190 add_to_hash vfunc. */
1191
1192hashval_t
1193region::hash () const
1194{
1195 inchash::hash hstate;
1196 add_to_hash (hstate);
1197 return hstate.end ();
1198}
1199
1200/* Print a one-liner representation of this region to PP, assuming
1201 that this region is within MODEL and its id is THIS_RID. */
1202
1203void
1204region::print (const region_model &model,
1205 region_id this_rid,
1206 pretty_printer *pp) const
1207{
1208 this_rid.print (pp);
1209 pp_string (pp, ": {");
1210
1211 /* vfunc. */
1212 print_fields (model, this_rid, pp);
1213
1214 pp_string (pp, "}");
1215}
1216
1217/* Base class implementation of region::dump_dot_to_pp vfunc. */
1218
1219void
1220region::dump_dot_to_pp (const region_model &model,
1221 region_id this_rid,
1222 pretty_printer *pp) const
1223{
1224 this_rid.dump_node_name_to_pp (pp);
1225 pp_printf (pp, " [shape=none,margin=0,style=filled,fillcolor=%s,label=\"",
1226 "lightgrey");
1227 pp_write_text_to_stream (pp);
1228 print (model, this_rid, pp);
1229 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
1230 pp_string (pp, "\"];");
1231 pp_newline (pp);
1232
1233 /* Add edge to svalue. */
1234 if (!m_sval_id.null_p ())
1235 {
1236 this_rid.dump_node_name_to_pp (pp);
1237 pp_string (pp, " -> ");
1238 m_sval_id.dump_node_name_to_pp (pp);
1239 pp_string (pp, ";");
1240 pp_newline (pp);
1241 }
1242
1243 /* Add edge to parent. */
1244 if (!m_parent_rid.null_p ())
1245 {
1246 this_rid.dump_node_name_to_pp (pp);
1247 pp_string (pp, " -> ");
1248 m_parent_rid.dump_node_name_to_pp (pp);
1249 pp_string (pp, ";");
1250 pp_newline (pp);
1251 }
1252}
1253
1254/* Dump a tree-like ASCII-art representation of this region to PP. */
1255
1256void
1257region::dump_to_pp (const region_model &model,
1258 region_id this_rid,
1259 pretty_printer *pp,
1260 const char *prefix,
1261 bool is_last_child) const
1262{
1263 print (model, this_rid, pp);
1264 pp_newline (pp);
1265
1266 const char *new_prefix;
1267 if (!m_parent_rid.null_p ())
1268 new_prefix = ACONCAT ((prefix, is_last_child ? " " : "| ", NULL));
1269 else
1270 new_prefix = prefix;
1271
1272 const char *begin_color = colorize_start (pp_show_color (pp), "note");
1273 const char *end_color = colorize_stop (pp_show_color (pp));
1274 char *field_prefix
1275 = ACONCAT ((begin_color, new_prefix, "|:", end_color, NULL));
1276
1277 if (!m_sval_id.null_p ())
1278 {
1279 pp_printf (pp, "%s sval: ", field_prefix);
1280 model.get_svalue (m_sval_id)->print (model, m_sval_id, pp);
1281 pp_newline (pp);
1282 }
1283 if (m_type)
1284 {
1285PUSH_IGNORE_WFORMAT
1286 pp_printf (pp, "%s type: %qT", field_prefix, m_type);
1287POP_IGNORE_WFORMAT
1288 pp_newline (pp);
1289 }
1290
1291 /* Find the children. */
1292
1293 auto_vec<region_id> child_rids;
1294 unsigned i;
1295 for (unsigned i = 0; i < model.get_num_regions (); ++i)
1296 {
1297 region_id rid = region_id::from_int (i);
1298 region *child = model.get_region (rid);
1299 if (child->m_parent_rid == this_rid)
1300 child_rids.safe_push (rid);
1301 }
1302
1303 /* Print the children, using dump_child_label to label them. */
1304
1305 region_id *child_rid;
1306 FOR_EACH_VEC_ELT (child_rids, i, child_rid)
1307 {
1308 is_last_child = (i == child_rids.length () - 1);
1309 if (!this_rid.null_p ())
1310 {
1311 const char *tail = is_last_child ? "`-" : "|-";
1312 pp_printf (pp, "%r%s%s%R", "note", new_prefix, tail);
1313 }
1314 dump_child_label (model, this_rid, *child_rid, pp);
1315 model.get_region (*child_rid)->dump_to_pp (model, *child_rid, pp,
1316 new_prefix,
1317 is_last_child);
1318 }
1319}
1320
1321/* Base implementation of region::dump_child_label vfunc. */
1322
1323void
1324region::dump_child_label (const region_model &model,
1325 region_id this_rid ATTRIBUTE_UNUSED,
1326 region_id child_rid,
1327 pretty_printer *pp) const
1328{
1329 region *child = model.get_region (child_rid);
1330 if (child->m_is_view)
1331 {
1332 gcc_assert (TYPE_P (child->get_type ()));
1333 if (m_active_view_rid == child_rid)
1334 pp_string (pp, "active ");
1335 else
1336 pp_string (pp, "inactive ");
1337PUSH_IGNORE_WFORMAT
1338 pp_printf (pp, "view as %qT: ", child->get_type ());
1339POP_IGNORE_WFORMAT
1340 }
1341}
1342
1343/* Assert that this object is valid. */
1344
1345void
1346region::validate (const region_model *model) const
1347{
1348 m_parent_rid.validate (*model);
1349 m_sval_id.validate (*model);
1350 unsigned i;
1351 region_id *view_rid;
1352 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1353 {
1354 gcc_assert (!view_rid->null_p ());
1355 view_rid->validate (*model);
1356 }
1357 m_active_view_rid.validate (*model);
1358}
1359
1360/* Apply MAP to the svalue_ids within this region. This updates the value
1361 for the region (if any). */
1362
1363void
1364region::remap_svalue_ids (const svalue_id_map &map)
1365{
1366 map.update (&m_sval_id);
1367}
1368
1369/* Base implementation of region::remap_region_ids vfunc; subclasses should
1370 chain up to this, updating any region_id data. */
1371
1372void
1373region::remap_region_ids (const region_id_map &map)
1374{
1375 map.update (&m_parent_rid);
1376 unsigned i;
1377 region_id *view_rid;
1378 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1379 map.update (view_rid);
1380 map.update (&m_active_view_rid);
1381}
1382
1383/* Add a new region with id VIEW_RID as a view of this region. */
1384
1385void
1386region::add_view (region_id view_rid, region_model *model)
1387{
1388 gcc_assert (!view_rid.null_p ());
1389 region *new_view = model->get_region (view_rid);
1390 new_view->m_is_view = true;
1391 gcc_assert (!new_view->m_parent_rid.null_p ());
1392 gcc_assert (new_view->m_sval_id.null_p ());
1393
1394 //gcc_assert (new_view->get_type () != NULL_TREE);
1395 // TODO: this can sometimes be NULL, when viewing through a (void *)
1396
1397 // TODO: the type ought to not be present yet
1398
1399 m_view_rids.safe_push (view_rid);
1400}
1401
1402/* Look for a view of type TYPE of this region, returning its id if found,
1403 or null otherwise. */
1404
1405region_id
1406region::get_view (tree type, region_model *model) const
1407{
1408 unsigned i;
1409 region_id *view_rid;
1410 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1411 {
1412 region *view = model->get_region (*view_rid);
1413 gcc_assert (view->m_is_view);
1414 if (view->get_type () == type)
1415 return *view_rid;
1416 }
1417 return region_id::null ();
1418}
1419
1420/* region's ctor. */
1421
1422region::region (region_id parent_rid, svalue_id sval_id, tree type)
1423: m_parent_rid (parent_rid), m_sval_id (sval_id), m_type (type),
1424 m_view_rids (), m_is_view (false), m_active_view_rid (region_id::null ())
1425{
1426 gcc_assert (type == NULL_TREE || TYPE_P (type));
1427}
1428
1429/* region's copy ctor. */
1430
1431region::region (const region &other)
1432: m_parent_rid (other.m_parent_rid), m_sval_id (other.m_sval_id),
1433 m_type (other.m_type), m_view_rids (other.m_view_rids.length ()),
1434 m_is_view (other.m_is_view), m_active_view_rid (other.m_active_view_rid)
1435{
1436 int i;
1437 region_id *rid;
1438 FOR_EACH_VEC_ELT (other.m_view_rids, i, rid)
1439 m_view_rids.quick_push (*rid);
1440}
1441
1442/* Base implementation of region::add_to_hash vfunc; subclasses should
1443 chain up to this. */
1444
1445void
1446region::add_to_hash (inchash::hash &hstate) const
1447{
1448 inchash::add (m_parent_rid, hstate);
1449 inchash::add (m_sval_id, hstate);
1450 hstate.add_ptr (m_type);
1451 // TODO: views
1452}
1453
1454/* Base implementation of region::print_fields vfunc. */
1455
1456void
1457region::print_fields (const region_model &model ATTRIBUTE_UNUSED,
1458 region_id this_rid ATTRIBUTE_UNUSED,
1459 pretty_printer *pp) const
1460{
1461 pp_printf (pp, "kind: %qs", region_kind_to_str (get_kind ()));
1462
1463 pp_string (pp, ", parent: ");
1464 m_parent_rid.print (pp);
1465
1466 pp_printf (pp, ", sval: ");
1467 m_sval_id.print (pp);
1468
1469PUSH_IGNORE_WFORMAT
1470 if (m_type)
1471 pp_printf (pp, ", type: %qT", m_type);
1472POP_IGNORE_WFORMAT
1473}
1474
1475/* Determine if a pointer to this region must be non-NULL.
1476
1477 Generally, pointers to regions must be non-NULL, but pointers
1478 to symbolic_regions might, in fact, be NULL.
1479
1480 This allows us to simulate functions like malloc and calloc with:
1481 - only one "outcome" from each statement,
1482 - the idea that the pointer is on the heap if non-NULL
1483 - the possibility that the pointer could be NULL
1484 - the idea that successive values returned from malloc are non-equal
1485 - to be able to zero-fill for calloc. */
1486
1487bool
1488region::non_null_p (const region_model &model) const
1489{
1490 /* Look through views to get at the underlying region. */
1491 if (is_view_p ())
1492 return model.get_region (m_parent_rid)->non_null_p (model);
1493
1494 /* Are we within a symbolic_region? If so, it could be NULL. */
1495 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
1496 {
1497 if (sym_reg->m_possibly_null)
1498 return false;
1499 }
1500
1501 return true;
1502}
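/* A sketch of the malloc case described above (the details of how such
   calls are modeled live elsewhere):

       void *p = malloc (16);
       if (p)
         use_buffer (p);

   Here "p" points into a symbolic_region with m_possibly_null set, so
   non_null_p is false at the point of the call; on the true branch the
   constraint "p != NULL" has been added, letting the single outcome of
   malloc cover both the NULL and non-NULL possibilities.  */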
1503
1504/* class primitive_region : public region. */
1505
1506/* Implementation of region::clone vfunc for primitive_region. */
1507
1508region *
1509primitive_region::clone () const
1510{
1511 return new primitive_region (*this);
1512}
1513
1514/* Implementation of region::walk_for_canonicalization vfunc for
1515 primitive_region. */
1516
1517void
1518primitive_region::walk_for_canonicalization (canonicalization *) const
1519{
1520 /* Empty. */
1521}
1522
1523/* class map_region : public region. */
1524
1525/* map_region's copy ctor. */
1526
1527map_region::map_region (const map_region &other)
1528: region (other),
1529 m_map (other.m_map)
1530{
1531}
1532
1533/* Compare the fields of this map_region with OTHER, returning true
1534 if they are equal.
1535 For use by region::operator==. */
1536
1537bool
1538map_region::compare_fields (const map_region &other) const
1539{
1540 if (m_map.elements () != other.m_map.elements ())
1541 return false;
1542
1543 for (map_t::iterator iter = m_map.begin ();
1544 iter != m_map.end ();
1545 ++iter)
1546 {
1547 tree key = (*iter).first;
1548 region_id e = (*iter).second;
1549 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key);
1550 if (other_slot == NULL)
1551 return false;
1552 if (e != *other_slot)
1553 return false;
1554 }
1555 return true;
1556}
1557
1558/* Implementation of region::print_fields vfunc for map_region. */
1559
1560void
1561map_region::print_fields (const region_model &model,
1562 region_id this_rid,
1563 pretty_printer *pp) const
1564{
1565 region::print_fields (model, this_rid, pp);
1566 pp_string (pp, ", map: {");
1567 for (map_t::iterator iter = m_map.begin ();
1568 iter != m_map.end ();
1569 ++iter)
1570 {
1571 if (iter != m_map.begin ())
1572 pp_string (pp, ", ");
1573 tree expr = (*iter).first;
1574 region_id child_rid = (*iter).second;
1575PUSH_IGNORE_WFORMAT
1576 pp_printf (pp, "%qE: ", expr);
1577POP_IGNORE_WFORMAT
1578 child_rid.print (pp);
1579 }
1580 pp_string (pp, "}");
1581}
1582
1583/* Implementation of region::dump_dot_to_pp vfunc for map_region. */
1584
1585void
1586map_region::dump_dot_to_pp (const region_model &model,
1587 region_id this_rid,
1588 pretty_printer *pp) const
1589{
1590 region::dump_dot_to_pp (model, this_rid, pp);
1591 for (map_t::iterator iter = m_map.begin ();
1592 iter != m_map.end ();
1593 ++iter)
1594 {
1595 // TODO: add nodes/edges to label things
1596
1597 tree expr = (*iter).first;
1598 region_id child_rid = (*iter).second;
1599
1600 pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ());
1601 pp_write_text_to_stream (pp);
1602PUSH_IGNORE_WFORMAT
1603 pp_printf (pp, "%qE", expr);
1604POP_IGNORE_WFORMAT
1605 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
1606 pp_string (pp, "\"];");
1607 pp_newline (pp);
1608
1609 pp_printf (pp, "rid_label_%i", child_rid.as_int ());
1610 pp_string (pp, " -> ");
1611 child_rid.dump_node_name_to_pp (pp);
1612 pp_string (pp, ";");
1613 pp_newline (pp);
1614 }
1615}
1616
1617/* Implementation of region::dump_child_label vfunc for map_region. */
1618
1619void
1620map_region::dump_child_label (const region_model &model,
1621 region_id this_rid,
1622 region_id child_rid,
1623 pretty_printer *pp) const
1624{
1625 region::dump_child_label (model, this_rid, child_rid, pp);
1626
1627 for (map_t::iterator iter = m_map.begin ();
1628 iter != m_map.end ();
1629 ++iter)
1630 {
1631 if (child_rid == (*iter).second)
1632 {
1633 tree key = (*iter).first;
1634PUSH_IGNORE_WFORMAT
1635 if (DECL_P (key))
1636 pp_printf (pp, "%qD: ", key);
1637 else
1638 pp_printf (pp, "%qE: ", key);
1639POP_IGNORE_WFORMAT
1640 }
1641 }
1642}
1643
1644/* Look for a child region for KEY within this map_region.
1645 If it doesn't already exist, create a child map_region, using TYPE for
1646 its type.
1647 Return the region_id of the child (whether pre-existing, or
1648 newly-created). */
1649
1650region_id
1651map_region::get_or_create (region_model *model,
1652 region_id this_rid,
1653 tree key,
1654 tree type)
1655{
1656 gcc_assert (key);
1657 gcc_assert (valid_key_p (key));
1658 region_id *slot = m_map.get (key);
1659 if (slot)
1660 return *slot;
1661 region_id child_rid = model->add_region_for_type (this_rid, type);
1662 m_map.put (key, child_rid);
1663 return child_rid;
1664}
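/* A hypothetical caller, binding a field of a struct ("struct_reg",
   "struct_rid" and "field" are illustrative names; "field" is assumed to
   be a FIELD_DECL within the struct's type):

     region_id field_rid
       = struct_reg->get_or_create (model, struct_rid, field,
                                    TREE_TYPE (field));

   Repeated calls with the same key return the same region_id.  */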
1665
1666/* Get the region_id for the child region for KEY within this
1667 MAP_REGION, or NULL if there is no such child region. */
1668
1669region_id *
1670map_region::get (tree key)
1671{
1672 gcc_assert (key);
1673 gcc_assert (valid_key_p (key));
1674 region_id *slot = m_map.get (key);
1675 return slot;
1676}
1677
1678/* Implementation of region::add_to_hash vfunc for map_region. */
1679
1680void
1681map_region::add_to_hash (inchash::hash &hstate) const
1682{
1683 region::add_to_hash (hstate);
1684 // TODO
1685}
1686
1687/* Implementation of region::remap_region_ids vfunc for map_region. */
1688
1689void
1690map_region::remap_region_ids (const region_id_map &map)
1691{
1692 region::remap_region_ids (map);
1693
1694 /* Remap the region ids within the map entries. */
1695 for (map_t::iterator iter = m_map.begin ();
1696 iter != m_map.end (); ++iter)
1697 map.update (&(*iter).second);
1698}
1699
1700/* Remove the binding of KEY to its child region (but not the
1701 child region itself).
1702 For use when purging unneeded SSA names. */
1703
1704void
1705map_region::unbind (tree key)
1706{
1707 gcc_assert (key);
1708 gcc_assert (valid_key_p (key));
1709 m_map.remove (key);
1710}
1711
1712/* Look for a child region with id CHILD_RID within this map_region.
1713 If one is found, return its tree key, otherwise return NULL_TREE. */
1714
1715tree
1716map_region::get_tree_for_child_region (region_id child_rid) const
1717{
1718 // TODO: do we want to store an inverse map?
1719 for (map_t::iterator iter = m_map.begin ();
1720 iter != m_map.end ();
1721 ++iter)
1722 {
1723 tree key = (*iter).first;
1724 region_id r = (*iter).second;
1725 if (r == child_rid)
1726 return key;
1727 }
1728
1729 return NULL_TREE;
1730}
1731
1732/* Look for a child region CHILD within this map_region.
1733 If one is found, return its tree key, otherwise return NULL_TREE. */
1734
1735tree
1736map_region::get_tree_for_child_region (region *child,
1737 const region_model &model) const
1738{
1739 // TODO: do we want to store an inverse map?
1740 for (map_t::iterator iter = m_map.begin ();
1741 iter != m_map.end ();
1742 ++iter)
1743 {
1744 tree key = (*iter).first;
1745 region_id r = (*iter).second;
1746 if (model.get_region (r) == child)
1747 return key;
1748 }
1749
1750 return NULL_TREE;
1751}
1752
1753/* Comparator for trees to impose a deterministic ordering on
1754 T1 and T2. */
1755
1756static int
1757tree_cmp (const_tree t1, const_tree t2)
1758{
1759 gcc_assert (t1);
1760 gcc_assert (t2);
1761
1762 /* Test tree codes first. */
1763 if (TREE_CODE (t1) != TREE_CODE (t2))
1764 return TREE_CODE (t1) - TREE_CODE (t2);
1765
1766 /* From this point on, we know T1 and T2 have the same tree code. */
1767
1768 if (DECL_P (t1))
1769 {
1770 if (DECL_NAME (t1) && DECL_NAME (t2))
1771 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
1772 IDENTIFIER_POINTER (DECL_NAME (t2)));
1773 else
1774 {
1775 if (DECL_NAME (t1))
1776 return -1;
1777 else if (DECL_NAME (t2))
1778 return 1;
1779 else
1780 return DECL_UID (t1) - DECL_UID (t2);
1781 }
1782 }
1783
1784 switch (TREE_CODE (t1))
1785 {
1786 case SSA_NAME:
1787 {
1788 if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
1789 {
1790 int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
1791 if (var_cmp)
1792 return var_cmp;
1793 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
1794 }
1795 else
1796 {
1797 if (SSA_NAME_VAR (t1))
1798 return -1;
1799 else if (SSA_NAME_VAR (t2))
1800 return 1;
1801 else
1802 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
1803 }
1804 }
1805 break;
1806
1807 case INTEGER_CST:
1808 return tree_int_cst_compare (t1, t2);
1809
1810 case REAL_CST:
1811 {
1812 real_value *rv1 = TREE_REAL_CST_PTR (t1);
1813 real_value *rv2 = TREE_REAL_CST_PTR (t2);
1814 if (real_compare (LT_EXPR, rv1, rv2))
1815 return -1;
1816 if (real_compare (LT_EXPR, rv2, rv1))
1817 return 1;
1818 return 0;
1819 }
1820
1821 case STRING_CST:
1822 return strcmp (TREE_STRING_POINTER (t1),
1823 TREE_STRING_POINTER (t2));
1824
1825 default:
1826 gcc_unreachable ();
1827 break;
1828 }
1829
1830 gcc_unreachable ();
1831
1832 return 0;
1833}
1834
1835/* qsort comparator for trees to impose a deterministic ordering on
1836 P1 and P2. */
1837
1838static int
1839tree_cmp (const void *p1, const void *p2)
1840{
1841 const_tree t1 = *(const_tree const *)p1;
1842 const_tree t2 = *(const_tree const *)p2;
1843
1844 int result = tree_cmp (t1, t2);
1845
1846 /* Check that the ordering is symmetric */
1847#if CHECKING_P
1848 int reversed = tree_cmp (t2, t1);
1849 gcc_assert (reversed == -result);
1850#endif
1851
1852 /* We should only have 0 for equal pairs. */
1853#if 0
1854 gcc_assert (result != 0
1855 || t1 == t2);
1856#endif
1857
1858 return result;
1859}
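/* Typical usage, as in map_region::walk_for_canonicalization below:
   gather the keys into an auto_vec<tree> and sort them so that iteration
   order does not depend on pointer values:

     auto_vec<tree> keys (m_map.elements ());
     for (map_t::iterator iter = m_map.begin (); iter != m_map.end (); ++iter)
       keys.quick_push ((*iter).first);
     keys.qsort (tree_cmp);
*/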
1860
1861/* Attempt to merge MAP_REGION_A and MAP_REGION_B into MERGED_MAP_REGION,
1862 which has region_id MERGED_RID, using MERGER.
1863 Return true if the merger is possible, false otherwise. */
1864
1865bool
1866map_region::can_merge_p (const map_region *map_region_a,
1867 const map_region *map_region_b,
1868 map_region *merged_map_region,
1869 region_id merged_rid,
1870 model_merger *merger)
1871{
1872 for (map_t::iterator iter = map_region_a->m_map.begin ();
1873 iter != map_region_a->m_map.end ();
1874 ++iter)
1875 {
1876 tree key_a = (*iter).first;
1877 region_id rid_a = (*iter).second;
1878
1879 if (const region_id *slot_b
1880 = const_cast<map_region *>(map_region_b)->m_map.get (key_a))
1881 {
1882 region_id rid_b = *slot_b;
1883
1884 region *child_region_a = merger->get_region_a <region> (rid_a);
1885 region *child_region_b = merger->get_region_b <region> (rid_b);
1886
1887 gcc_assert (child_region_a->get_type ()
1888 == child_region_b->get_type ());
1889
1890 gcc_assert (child_region_a->get_kind ()
1891 == child_region_b->get_kind ());
1892
1893 region_id child_merged_rid
1894 = merged_map_region->get_or_create (merger->m_merged_model,
1895 merged_rid,
1896 key_a,
1897 child_region_a->get_type ());
1898
1899 region *child_merged_region
1900 = merger->m_merged_model->get_region (child_merged_rid);
1901
1902 /* Consider values. */
1903 svalue_id child_a_sid = child_region_a->get_value_direct ();
1904 svalue_id child_b_sid = child_region_b->get_value_direct ();
1905 svalue_id child_merged_sid;
1906 if (!merger->can_merge_values_p (child_a_sid, child_b_sid,
1907 &child_merged_sid))
1908 return false;
1909 if (!child_merged_sid.null_p ())
1910 child_merged_region->set_value (*merger->m_merged_model,
1911 child_merged_rid,
1912 child_merged_sid,
1913 NULL);
1914
1915 if (map_region *map_region_a = child_region_a->dyn_cast_map_region ())
1916 {
1917 /* Recurse. */
1918 if (!can_merge_p (map_region_a,
1919 as_a <map_region *> (child_region_b),
1920 as_a <map_region *> (child_merged_region),
1921 child_merged_rid,
1922 merger))
1923 return false;
1924 }
1925
1926 }
1927 else
1928 {
1929 /* TODO: region is present in A, but absent in B. */
1930 }
1931 }
1932
1933 /* TODO: check for keys in B that aren't in A. */
1934
1935 return true;
1936}
1937
1938
1939/* Implementation of region::walk_for_canonicalization vfunc for
1940 map_region. */
1941
1942void
1943map_region::walk_for_canonicalization (canonicalization *c) const
1944{
1945 auto_vec<tree> keys (m_map.elements ());
1946 for (map_t::iterator iter = m_map.begin ();
1947 iter != m_map.end ();
1948 ++iter)
1949 {
1950 tree key_a = (*iter).first;
1951 keys.quick_push (key_a);
1952 }
1953 keys.qsort (tree_cmp);
1954
1955 unsigned i;
1956 tree key;
1957 FOR_EACH_VEC_ELT (keys, i, key)
1958 {
1959 region_id rid = *const_cast<map_region *>(this)->m_map.get (key);
1960 c->walk_rid (rid);
1961 }
1962}
1963
1964/* For debugging purposes: look for a child region for a decl named
1965 IDENTIFIER (or an SSA_NAME for such a decl), returning its value,
1966 or svalue_id::null if none are found. */
1967
1968svalue_id
1969map_region::get_value_by_name (tree identifier,
1970 const region_model &model) const
1971{
1972 for (map_t::iterator iter = m_map.begin ();
1973 iter != m_map.end ();
1974 ++iter)
1975 {
1976 tree key = (*iter).first;
1977 if (TREE_CODE (key) == SSA_NAME)
1978 if (SSA_NAME_VAR (key))
1979 key = SSA_NAME_VAR (key);
1980 if (DECL_P (key))
1981 if (DECL_NAME (key) == identifier)
1982 {
1983 region_id rid = (*iter).second;
1984 region *region = model.get_region (rid);
1985 return region->get_value (const_cast<region_model &>(model),
1986 false, NULL);
1987 }
1988 }
1989 return svalue_id::null ();
1990}
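/* A sketch of using this from a debugging session or selftest
   ("frame_reg" is an illustrative name for some map_region, such as a
   frame; get_identifier interns the string into the identifier table):

     svalue_id sid
       = frame_reg->get_value_by_name (get_identifier ("i"), model);
*/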
1991
1992/* class struct_or_union_region : public map_region. */
1993
1994/* Implementation of map_region::valid_key_p vfunc for
1995 struct_or_union_region. */
1996
1997bool
1998struct_or_union_region::valid_key_p (tree key) const
1999{
2000 return TREE_CODE (key) == FIELD_DECL;
2001}
2002
2003/* Compare the fields of this struct_or_union_region with OTHER, returning
2004 true if they are equal.
2005 For use by region::operator==. */
2006
2007bool
2008struct_or_union_region::compare_fields (const struct_or_union_region &other)
2009 const
2010{
2011 return map_region::compare_fields (other);
2012}
2013
2014/* class struct_region : public struct_or_union_region. */
2015
2016/* Implementation of region::clone vfunc for struct_region. */
2017
2018region *
2019struct_region::clone () const
2020{
2021 return new struct_region (*this);
2022}
2023
2024/* Compare the fields of this struct_region with OTHER, returning true
2025 if they are equal.
2026 For use by region::operator==. */
2027
2028bool
2029struct_region::compare_fields (const struct_region &other) const
2030{
2031 return struct_or_union_region::compare_fields (other);
2032}
2033
2034/* class union_region : public struct_or_union_region. */
2035
2036/* Implementation of region::clone vfunc for union_region. */
2037
2038region *
2039union_region::clone () const
2040{
2041 return new union_region (*this);
2042}
2043
2044/* Compare the fields of this union_region with OTHER, returning true
2045 if they are equal.
2046 For use by region::operator==. */
2047
2048bool
2049union_region::compare_fields (const union_region &other) const
2050{
2051 return struct_or_union_region::compare_fields (other);
2052}
2053
2054/* class frame_region : public map_region. */
2055
2056/* Compare the fields of this frame_region with OTHER, returning true
2057 if they are equal.
2058 For use by region::operator==. */
2059
2060bool
2061frame_region::compare_fields (const frame_region &other) const
2062{
2063 if (!map_region::compare_fields (other))
2064 return false;
2065 if (m_fun != other.m_fun)
2066 return false;
2067 if (m_depth != other.m_depth)
2068 return false;
2069 return true;
2070}
2071
2072/* Implementation of region::clone vfunc for frame_region. */
2073
2074region *
2075frame_region::clone () const
2076{
2077 return new frame_region (*this);
2078}
2079
2080/* Implementation of map_region::valid_key_p vfunc for frame_region. */
2081
2082bool
2083frame_region::valid_key_p (tree key) const
2084{
2085 // TODO: could also check that VAR_DECLs are locals
2086 return (TREE_CODE (key) == PARM_DECL
2087 || TREE_CODE (key) == VAR_DECL
2088 || TREE_CODE (key) == SSA_NAME
2089 || TREE_CODE (key) == RESULT_DECL);
2090}
2091
2092/* Implementation of region::print_fields vfunc for frame_region. */
2093
2094void
2095frame_region::print_fields (const region_model &model,
2096 region_id this_rid,
2097 pretty_printer *pp) const
2098{
2099 map_region::print_fields (model, this_rid, pp);
2100 pp_printf (pp, ", function: %qs, depth: %i", function_name (m_fun), m_depth);
2101}
2102
2103/* Implementation of region::add_to_hash vfunc for frame_region. */
2104
2105void
2106frame_region::add_to_hash (inchash::hash &hstate) const
2107{
2108 map_region::add_to_hash (hstate);
2109 hstate.add_ptr (m_fun);
2110 hstate.add_int (m_depth);
2111}
2112
2113/* class globals_region : public scope_region. */
2114
2115/* Compare the fields of this globals_region with OTHER, returning true
2116 if they are equal.
2117 For use by region::operator==. */
2118
2119bool
2120globals_region::compare_fields (const globals_region &other) const
2121{
2122 return map_region::compare_fields (other);
2123}
2124
2125/* Implementation of region::clone vfunc for globals_region. */
2126
2127region *
2128globals_region::clone () const
2129{
2130 return new globals_region (*this);
2131}
2132
2133/* Implementation of map_region::valid_key_p vfunc for globals_region. */
2134
2135bool
2136globals_region::valid_key_p (tree key) const
2137{
2138 return TREE_CODE (key) == VAR_DECL;
2139}
2140
2141/* class code_region : public map_region. */
2142
2143/* Compare the fields of this code_region with OTHER, returning true
2144 if they are equal.
2145 For use by region::operator==. */
2146
2147bool
2148code_region::compare_fields (const code_region &other) const
2149{
2150 return map_region::compare_fields (other);
2151}
2152
2153/* Implementation of region::clone vfunc for code_region. */
2154
2155region *
2156code_region::clone () const
2157{
2158 return new code_region (*this);
2159}
2160
2161/* Implementation of map_region::valid_key_p vfunc for code_region. */
2162
2163bool
2164code_region::valid_key_p (tree key) const
2165{
2166 return TREE_CODE (key) == FUNCTION_DECL;
2167}
2168
2169/* class array_region : public region. */
2170
2171/* array_region's copy ctor. */
2172
2173array_region::array_region (const array_region &other)
2174: region (other),
2175 m_map (other.m_map)
2176{
2177}
2178
2179/* Get a child region for the element with index INDEX_SID. */
2180
2181region_id
2182array_region::get_element (region_model *model,
2183 region_id this_rid,
2184 svalue_id index_sid,
2185 region_model_context *ctxt ATTRIBUTE_UNUSED)
2186{
2187 tree element_type = TREE_TYPE (get_type ());
2188 svalue *index_sval = model->get_svalue (index_sid);
2189 if (tree cst_index = index_sval->maybe_get_constant ())
2190 {
2191 key_t key = key_from_constant (cst_index);
2192 region_id element_rid
2193 = get_or_create (model, this_rid, key, element_type);
2194 return element_rid;
2195 }
2196
2197 return model->get_or_create_view (this_rid, element_type);
2198}
2199
2200/* Implementation of region::clone vfunc for array_region. */
2201
2202region *
2203array_region::clone () const
2204{
2205 return new array_region (*this);
2206}
2207
2208/* Compare the fields of this array_region with OTHER, returning true
2209 if they are equal.
2210 For use by region::operator==. */
2211
2212bool
2213array_region::compare_fields (const array_region &other) const
2214{
2215 if (m_map.elements () != other.m_map.elements ())
2216 return false;
2217
2218 for (map_t::iterator iter = m_map.begin ();
2219 iter != m_map.end ();
2220 ++iter)
2221 {
2222 int key = (*iter).first;
2223 region_id e = (*iter).second;
2224 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key);
2225 if (other_slot == NULL)
2226 return false;
2227 if (e != *other_slot)
2228 return false;
2229 }
2230 return true;
2231}
2232
2233/* Implementation of region::print_fields vfunc for array_region. */
2234
2235void
2236array_region::print_fields (const region_model &model,
2237 region_id this_rid,
2238 pretty_printer *pp) const
2239{
2240 region::print_fields (model, this_rid, pp);
2241 pp_string (pp, ", array: {");
2242 for (map_t::iterator iter = m_map.begin ();
2243 iter != m_map.end ();
2244 ++iter)
2245 {
2246 if (iter != m_map.begin ())
2247 pp_string (pp, ", ");
2248 int key = (*iter).first;
2249 region_id child_rid = (*iter).second;
2250PUSH_IGNORE_WFORMAT
2251 pp_printf (pp, "[%i]: ", key);
2252POP_IGNORE_WFORMAT
2253 child_rid.print (pp);
2254 }
2255 pp_string (pp, "}");
2256}
2257
2258/* Implementation of region::dump_dot_to_pp vfunc for array_region. */
2259
2260void
2261array_region::dump_dot_to_pp (const region_model &model,
2262 region_id this_rid,
2263 pretty_printer *pp) const
2264{
2265 region::dump_dot_to_pp (model, this_rid, pp);
2266 for (map_t::iterator iter = m_map.begin ();
2267 iter != m_map.end ();
2268 ++iter)
2269 {
2270 // TODO: add nodes/edges to label things
2271
2272 int key = (*iter).first;
2273 region_id child_rid = (*iter).second;
2274
2275 pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ());
2276 pp_write_text_to_stream (pp);
2277PUSH_IGNORE_WFORMAT
2278 pp_printf (pp, "%qi", key);
2279POP_IGNORE_WFORMAT
2280 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
2281 pp_string (pp, "\"];");
2282 pp_newline (pp);
2283
2284 pp_printf (pp, "rid_label_%i", child_rid.as_int ());
2285 pp_string (pp, " -> ");
2286 child_rid.dump_node_name_to_pp (pp);
2287 pp_string (pp, ";");
2288 pp_newline (pp);
2289 }
2290}
2291
2292/* Implementation of region::dump_child_label vfunc for array_region. */
2293
2294void
2295array_region::dump_child_label (const region_model &model,
2296 region_id this_rid,
2297 region_id child_rid,
2298 pretty_printer *pp) const
2299{
2300 region::dump_child_label (model, this_rid, child_rid, pp);
2301
2302 for (map_t::iterator iter = m_map.begin ();
2303 iter != m_map.end ();
2304 ++iter)
2305 {
2306 if (child_rid == (*iter).second)
2307 {
2308 int key = (*iter).first;
2309 pp_printf (pp, "[%i]: ", key);
2310 }
2311 }
2312}
2313
2314/* Look for a child region for KEY within this array_region.
2315 If it doesn't already exist, create a child region, using TYPE for
2316 its type.
2317 Return the region_id of the child (whether pre-existing, or
2318 newly-created). */
2319
2320region_id
2321array_region::get_or_create (region_model *model,
2322 region_id this_rid,
2323 key_t key,
2324 tree type)
2325{
2326 region_id *slot = m_map.get (key);
2327 if (slot)
2328 return *slot;
2329 region_id child_rid = model->add_region_for_type (this_rid, type);
2330 m_map.put (key, child_rid);
2331 return child_rid;
2332}
2333
2334/* Get the region_id for the child region for KEY within this
2335 ARRAY_REGION, or NULL if there is no such child region. */
2336
2337region_id *
2338array_region::get (key_t key)
2339{
2340 region_id *slot = m_map.get (key);
2341 return slot;
2342}
2343
2344/* Implementation of region::add_to_hash vfunc for array_region. */
2345
2346void
2347array_region::add_to_hash (inchash::hash &hstate) const
2348{
2349 region::add_to_hash (hstate);
2350 // TODO
2351}
2352
2353/* Implementation of region::remap_region_ids vfunc for array_region. */
2354
2355void
2356array_region::remap_region_ids (const region_id_map &map)
2357{
2358 region::remap_region_ids (map);
2359
2360 /* Remap the region ids within the map entries. */
2361 for (map_t::iterator iter = m_map.begin ();
2362 iter != m_map.end (); ++iter)
2363 map.update (&(*iter).second);
2364}
2365
2366/* Look for a child region with id CHILD_RID within this array_region.
2367 If one is found, write its key to *OUT and return true,
2368 otherwise return false. */
2369
2370bool
2371array_region::get_key_for_child_region (region_id child_rid, key_t *out) const
2372{
2373 // TODO: do we want to store an inverse map?
2374 for (map_t::iterator iter = m_map.begin ();
2375 iter != m_map.end ();
2376 ++iter)
2377 {
2378 key_t key = (*iter).first;
2379 region_id r = (*iter).second;
2380 if (r == child_rid)
2381 {
2382 *out = key;
2383 return true;
2384 }
2385 }
2386
2387 return false;
2388}
2389
2390/* qsort comparator for array_region's keys. */
2391
2392int
2393array_region::key_cmp (const void *p1, const void *p2)
2394{
2395 key_t i1 = *(const key_t *)p1;
2396 key_t i2 = *(const key_t *)p2;
2397
2398 if (i1 > i2)
2399 return 1;
2400 else if (i1 < i2)
2401 return -1;
2402 else
2403 return 0;
2404}
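/* A minimal standalone sketch of the same three-way comparator contract in
   plain C++, for readers unfamiliar with qsort callbacks: the comparator must
   return negative, zero, or positive, and explicit comparisons are preferred
   over "i1 - i2", which could overflow.  The toy_* names and the use of
   "long" keys are invented for the illustration; nothing below is used by
   the analyzer.  */

#include <cstdlib>

static int
toy_key_cmp (const void *p1, const void *p2)
{
  long i1 = *(const long *) p1;
  long i2 = *(const long *) p2;
  if (i1 > i2)
    return 1;
  else if (i1 < i2)
    return -1;
  else
    return 0;
}

static void
toy_sort_keys (long *keys, std::size_t n)
{
  /* Sort the keys into ascending order using the comparator above.  */
  std::qsort (keys, n, sizeof (keys[0]), toy_key_cmp);
}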
2405
2406/* Implementation of region::walk_for_canonicalization vfunc for
2407 array_region. */
2408
2409void
2410array_region::walk_for_canonicalization (canonicalization *c) const
2411{
2412 auto_vec<int> keys (m_map.elements ());
2413 for (map_t::iterator iter = m_map.begin ();
2414 iter != m_map.end ();
2415 ++iter)
2416 {
2417 int key_a = (*iter).first;
2418 keys.quick_push (key_a);
2419 }
2420 keys.qsort (key_cmp);
2421
2422 unsigned i;
2423 int key;
2424 FOR_EACH_VEC_ELT (keys, i, key)
2425 {
2426 region_id rid = *const_cast<array_region *>(this)->m_map.get (key);
2427 c->walk_rid (rid);
2428 }
2429}
2430
2431/* Convert constant CST into an array_region::key_t. */
2432
2433array_region::key_t
2434array_region::key_from_constant (tree cst)
2435{
2436 gcc_assert (CONSTANT_CLASS_P (cst));
2437 wide_int w = wi::to_wide (cst);
2438 key_t result = w.to_shwi ();
2439 return result;
2440}
2441
2442/* class function_region : public map_region. */
2443
2444/* Compare the fields of this function_region with OTHER, returning true
2445 if they are equal.
2446 For use by region::operator==. */
2447
2448bool
2449function_region::compare_fields (const function_region &other) const
2450{
2451 return map_region::compare_fields (other);
2452}
2453
2454/* Implementation of region::clone vfunc for function_region. */
2455
2456region *
2457function_region::clone () const
2458{
2459 return new function_region (*this);
2460}
2461
2462/* Implementation of map_region::valid_key_p vfunc for function_region. */
2463
2464bool
2465function_region::valid_key_p (tree key) const
2466{
2467 return TREE_CODE (key) == LABEL_DECL;
2468}
2469
2470/* class stack_region : public region. */
2471
2472/* stack_region's copy ctor. */
2473
2474stack_region::stack_region (const stack_region &other)
2475: region (other),
2476 m_frame_rids (other.m_frame_rids.length ())
2477{
2478 int i;
2479 region_id *frame_rid;
2480 FOR_EACH_VEC_ELT (other.m_frame_rids, i, frame_rid)
2481 m_frame_rids.quick_push (*frame_rid);
2482}
2483
2484/* Compare the fields of this stack_region with OTHER, returning true
2485 if they are equal.
2486 For use by region::operator==. */
2487
2488bool
2489stack_region::compare_fields (const stack_region &other) const
2490{
2491 if (m_frame_rids.length () != other.m_frame_rids.length ())
2492 return false;
2493
2494 int i;
2495 region_id *frame_rid;
2496 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2497 if (m_frame_rids[i] != other.m_frame_rids[i])
2498 return false;
2499
2500 return true;
2501}
2502
2503/* Implementation of region::clone vfunc for stack_region. */
2504
2505region *
2506stack_region::clone () const
2507{
2508 return new stack_region (*this);
2509}
2510
2511/* Implementation of region::print_fields vfunc for stack_region. */
2512
2513void
2514stack_region::print_fields (const region_model &model,
2515 region_id this_rid,
2516 pretty_printer *pp) const
2517{
2518 region::print_fields (model, this_rid, pp);
2519 // TODO
2520}
2521
2522/* Implementation of region::dump_child_label vfunc for stack_region. */
2523
2524void
2525stack_region::dump_child_label (const region_model &model,
2526 region_id this_rid ATTRIBUTE_UNUSED,
2527 region_id child_rid,
2528 pretty_printer *pp) const
2529{
2530 function *fun = model.get_region<frame_region> (child_rid)->get_function ();
2531 pp_printf (pp, "frame for %qs: ", function_name (fun));
2532}
2533
2534/* Push FRAME_RID (for a frame_region) onto this stack. */
2535
2536void
2537stack_region::push_frame (region_id frame_rid)
2538{
2539 m_frame_rids.safe_push (frame_rid);
2540}
2541
2542/* Get the region_id of the top-most frame in this stack, if any. */
2543
2544region_id
2545stack_region::get_current_frame_id () const
2546{
2547 if (m_frame_rids.length () > 0)
2548 return m_frame_rids[m_frame_rids.length () - 1];
2549 else
2550 return region_id::null ();
2551}
2552
2553/* Pop the topmost frame_region from this stack.
2554
2555 Purge the frame region and all its descendent regions.
2556 Convert any pointers that point into such regions into
2557 POISON_KIND_POPPED_STACK svalues.
2558
2559 Return the ID of any return value from the frame.
2560
2561 If PURGE, then purge all unused svalues, with the exception of any
2562 return value for the frame, which is temporarily
2563 preserved in case no regions reference it, so it can
2564 be written into a region in the caller.
2565
2566 Accumulate stats on purged entities into STATS. */
2567
2568svalue_id
2569stack_region::pop_frame (region_model *model, bool purge, purge_stats *stats,
2570 region_model_context *ctxt)
2571{
2572 gcc_assert (m_frame_rids.length () > 0);
2573
2574 region_id frame_rid = get_current_frame_id ();
2575 frame_region *frame = model->get_region<frame_region> (frame_rid);
2576
2577 /* Evaluate the result, within the callee frame. */
2578 svalue_id result_sid;
2579 tree fndecl = frame->get_function ()->decl;
2580 tree result = DECL_RESULT (fndecl);
2581 if (result && TREE_TYPE (result) != void_type_node)
2582 result_sid = model->get_rvalue (result, ctxt);
2583
2584 /* Pop the frame RID. */
2585 m_frame_rids.pop ();
2586
2587 model->delete_region_and_descendents (frame_rid,
2588 POISON_KIND_POPPED_STACK,
2589 stats,
2590 ctxt ? ctxt->get_logger () : NULL);
2591
2592 /* Delete unused svalues, but don't delete the return value. */
2593 if (purge)
2594 model->purge_unused_svalues (stats, ctxt, &result_sid);
2595
2596 model->validate ();
2597
2598 return result_sid;
2599}
2600
2601/* Implementation of region::add_to_hash vfunc for stack_region. */
2602
2603void
2604stack_region::add_to_hash (inchash::hash &hstate) const
2605{
2606 region::add_to_hash (hstate);
2607
2608 int i;
2609 region_id *frame_rid;
2610 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2611 inchash::add (*frame_rid, hstate);
2612}
2613
2614/* Implementation of region::remap_region_ids vfunc for stack_region. */
2615
2616void
2617stack_region::remap_region_ids (const region_id_map &map)
2618{
2619 region::remap_region_ids (map);
2620 int i;
2621 region_id *frame_rid;
2622 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2623 map.update (&m_frame_rids[i]);
2624}
2625
2626/* Attempt to merge STACK_REGION_A and STACK_REGION_B using MERGER.
2627 Return true if the merger is possible, false otherwise. */
2628
2629bool
2630stack_region::can_merge_p (const stack_region *stack_region_a,
2631 const stack_region *stack_region_b,
2632 model_merger *merger)
2633{
2634 if (stack_region_a->get_num_frames ()
2635 != stack_region_b->get_num_frames ())
2636 return false;
2637
2638 region_model *merged_model = merger->m_merged_model;
2639
2640 region_id rid_merged_stack
2641 = merged_model->get_root_region ()->ensure_stack_region (merged_model);
2642
2643 stack_region *merged_stack
2644 = merged_model->get_region <stack_region> (rid_merged_stack);
2645
2646 for (unsigned i = 0; i < stack_region_a->get_num_frames (); i++)
2647 {
2648 region_id rid_a = stack_region_a->get_frame_rid (i);
2649 frame_region *frame_a = merger->get_region_a <frame_region> (rid_a);
2650
2651 region_id rid_b = stack_region_b->get_frame_rid (i);
2652 frame_region *frame_b = merger->get_region_b <frame_region> (rid_b);
2653
2654 if (frame_a->get_function () != frame_b->get_function ())
2655 return false;
2656 frame_region *merged_frame = new frame_region (rid_merged_stack,
2657 frame_a->get_function (),
2658 frame_a->get_depth ());
2659 region_id rid_merged_frame = merged_model->add_region (merged_frame);
2660 merged_stack->push_frame (rid_merged_frame);
2661
2662 if (!map_region::can_merge_p (frame_a, frame_b,
2663 merged_frame, rid_merged_frame,
2664 merger))
2665 return false;
2666 }
2667
2668 return true;
2669}
2670
2671/* Implementation of region::walk_for_canonicalization vfunc for
2672 stack_region. */
2673
2674void
2675stack_region::walk_for_canonicalization (canonicalization *c) const
2676{
2677 int i;
2678 region_id *frame_rid;
2679 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2680 c->walk_rid (*frame_rid);
2681}
2682
2683/* For debugging purposes: look for a grandchild region within one of
2684 the child frame regions, where the grandchild is for a decl named
2685 IDENTIFIER (or an SSA_NAME for such a decl):
2686
2687 stack_region
2688 `-frame_region
2689 `-region for decl named IDENTIFIER
2690
2691 returning its value, or svalue_id::null if none are found. */
2692
2693svalue_id
2694stack_region::get_value_by_name (tree identifier,
2695 const region_model &model) const
2696{
2697 int i;
2698 region_id *frame_rid;
2699 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2700 {
2701 frame_region *frame = model.get_region<frame_region> (*frame_rid);
2702 svalue_id sid = frame->get_value_by_name (identifier, model);
2703 if (!sid.null_p ())
2704 return sid;
2705 }
2706
2707 return svalue_id::null ();
2708}
2709
2710/* class heap_region : public region. */
2711
2712/* heap_region's copy ctor. */
2713
2714heap_region::heap_region (const heap_region &other)
2715: region (other)
2716{
2717}
2718
2719/* Compare the fields of this heap_region with OTHER, returning true
2720 if they are equal.
2721 For use by region::operator==. */
2722
2723bool
2724heap_region::compare_fields (const heap_region &) const
2725{
2726 /* Empty. */
2727 return true;
2728}
2729
2730/* Implementation of region::clone vfunc for heap_region. */
2731
2732region *
2733heap_region::clone () const
2734{
2735 return new heap_region (*this);
2736}
2737
2738/* Implementation of region::walk_for_canonicalization vfunc for
2739 heap_region. */
2740
2741void
2742heap_region::walk_for_canonicalization (canonicalization *) const
2743{
2744 /* Empty. */
2745}
2746
2747/* class root_region : public region. */
2748
2749/* root_region's default ctor. */
2750
2751root_region::root_region ()
2752: region (region_id::null (),
2753 svalue_id::null (),
2754 NULL_TREE)
2755{
2756}
2757
2758/* root_region's copy ctor. */
2759
2760root_region::root_region (const root_region &other)
2761: region (other),
2762 m_stack_rid (other.m_stack_rid),
2763 m_globals_rid (other.m_globals_rid),
2764 m_code_rid (other.m_code_rid),
2765 m_heap_rid (other.m_heap_rid)
2766{
2767}
2768
2769/* Compare the fields of this root_region with OTHER, returning true
2770 if they are equal.
2771 For use by region::operator==. */
2772
2773bool
2774root_region::compare_fields (const root_region &other) const
2775{
2776 if (m_stack_rid != other.m_stack_rid)
2777 return false;
2778 if (m_globals_rid != other.m_globals_rid)
2779 return false;
2780 if (m_code_rid != other.m_code_rid)
2781 return false;
2782 if (m_heap_rid != other.m_heap_rid)
2783 return false;
2784 return true;
2785}
2786
2787/* Implementation of region::clone vfunc for root_region. */
2788
2789region *
2790root_region::clone () const
2791{
2792 return new root_region (*this);
2793}
2794
2795/* Implementation of region::print_fields vfunc for root_region. */
2796
2797void
2798root_region::print_fields (const region_model &model,
2799 region_id this_rid,
2800 pretty_printer *pp) const
2801{
2802 region::print_fields (model, this_rid, pp);
2803 // TODO
2804}
2805
2806/* Implementation of region::dump_child_label vfunc for root_region. */
2807
2808void
2809root_region::dump_child_label (const region_model &model ATTRIBUTE_UNUSED,
2810 region_id this_rid ATTRIBUTE_UNUSED,
2811 region_id child_rid,
2812 pretty_printer *pp) const
2813{
2814 if (child_rid == m_stack_rid)
2815 pp_printf (pp, "stack: ");
2816 else if (child_rid == m_globals_rid)
2817 pp_printf (pp, "globals: ");
2818 else if (child_rid == m_code_rid)
2819 pp_printf (pp, "code: ");
2820 else if (child_rid == m_heap_rid)
2821 pp_printf (pp, "heap: ");
2822}
2823
2824/* Create a new frame_region for a call to FUN and push it onto
2825 the stack.
2826
2827 If ARG_SIDS is non-NULL, use it to populate the parameters
2828 in the new frame.
2829 Otherwise, populate them with unknown values.
2830
2831 Return the region_id of the new frame. */
2832
2833region_id
2834root_region::push_frame (region_model *model, function *fun,
2835 vec<svalue_id> *arg_sids,
2836 region_model_context *ctxt)
2837{
2838 gcc_assert (fun);
2839 /* arg_sids can be NULL. */
2840
2841 ensure_stack_region (model);
2842 stack_region *stack = model->get_region <stack_region> (m_stack_rid);
2843
2844 frame_region *region = new frame_region (m_stack_rid, fun,
2845 stack->get_num_frames ());
2846 region_id frame_rid = model->add_region (region);
2847
2848 // TODO: unify these cases by building a vec of unknown?
2849
2850 if (arg_sids)
2851 {
2852 /* Arguments supplied from a caller frame. */
2853
2854 tree fndecl = fun->decl;
2855 unsigned idx = 0;
2856 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2857 iter_parm = DECL_CHAIN (iter_parm), ++idx)
2858 {
2859 /* If there's a mismatching declaration, the call stmt might
2860 not have enough args. Handle this case by leaving the
2861 rest of the params as uninitialized. */
2862 if (idx >= arg_sids->length ())
2863 break;
2864 svalue_id arg_sid = (*arg_sids)[idx];
2865 region_id parm_rid
2866 = region->get_or_create (model, frame_rid, iter_parm,
2867 TREE_TYPE (iter_parm));
2868 model->set_value (parm_rid, arg_sid, ctxt);
2869
2870 /* Also do it for default SSA name (sharing the same unknown
2871 value). */
2872 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2873 if (parm_default_ssa)
2874 {
2875 region_id defssa_rid
2876 = region->get_or_create (model, frame_rid, parm_default_ssa,
2877 TREE_TYPE (iter_parm));
2878 model->set_value (defssa_rid, arg_sid, ctxt);
2879 }
2880 }
2881 }
2882 else
2883 {
2884 /* No known arguments (a top-level call within the analysis). */
2885
2886 /* Params have a defined, unknown value; they should not inherit
2887 from the poisoned uninit value. */
2888 tree fndecl = fun->decl;
2889 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2890 iter_parm = DECL_CHAIN (iter_parm))
2891 {
2892 region_id parm_rid
2893 = region->get_or_create (model, frame_rid, iter_parm,
2894 TREE_TYPE (iter_parm));
2895 svalue_id parm_sid
2896 = model->set_to_new_unknown_value (parm_rid, TREE_TYPE (iter_parm),
2897 ctxt);
2898
2899 /* Also do it for default SSA name (sharing the same unknown
2900 value). */
2901 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2902 if (parm_default_ssa)
2903 {
2904 region_id defssa_rid
2905 = region->get_or_create (model, frame_rid, parm_default_ssa,
2906 TREE_TYPE (iter_parm));
2907 model->get_region (defssa_rid)->set_value (*model, defssa_rid,
2908 parm_sid, ctxt);
2909 }
2910 }
2911 }
2912
2913 stack->push_frame (frame_rid);
2914
2915 return frame_rid;
2916}
2917
2918/* Get the region_id of the top-most frame in this root_region's stack,
2919 if any. */
2920
2921region_id
2922root_region::get_current_frame_id (const region_model &model) const
2923{
2924 stack_region *stack = model.get_region <stack_region> (m_stack_rid);
2925 if (stack)
2926 return stack->get_current_frame_id ();
2927 else
2928 return region_id::null ();
2929}
2930
2931/* Pop the topmost frame_region from this root_region's stack;
2932 see the comment for stack_region::pop_frame. */
2933
2934svalue_id
2935root_region::pop_frame (region_model *model, bool purge, purge_stats *out,
2936 region_model_context *ctxt)
2937{
2938 stack_region *stack = model->get_region <stack_region> (m_stack_rid);
2939 return stack->pop_frame (model, purge, out, ctxt);
2940}
2941
2942/* Return the region_id of the stack region, creating it if it doesn't
2943 already exist. */
2944
2945region_id
2946root_region::ensure_stack_region (region_model *model)
2947{
2948 if (m_stack_rid.null_p ())
2949 {
2950 svalue_id uninit_sid
2951 = model->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT,
2952 NULL_TREE));
2953 m_stack_rid
2954 = model->add_region (new stack_region (model->get_root_rid (),
2955 uninit_sid));
2956 }
2957 return m_stack_rid;
2958}
2959
2960/* Return the stack region (which could be NULL). */
2961
2962stack_region *
2963root_region::get_stack_region (const region_model *model) const
2964{
2965 return model->get_region <stack_region> (m_stack_rid);
2966}
2967
2968/* Return the region_id of the globals region, creating it if it doesn't
2969 already exist. */
2970
2971region_id
2972root_region::ensure_globals_region (region_model *model)
2973{
2974 if (m_globals_rid.null_p ())
2975 m_globals_rid
2976 = model->add_region (new globals_region (model->get_root_rid ()));
2977 return m_globals_rid;
2978}
2979
2980/* Return the code region (which could be NULL). */
2981
2982code_region *
2983root_region::get_code_region (const region_model *model) const
2984{
2985 return model->get_region <code_region> (m_code_rid);
2986}
2987
2988/* Return the region_id of the code region, creating it if it doesn't
2989 already exist. */
2990
2991region_id
2992root_region::ensure_code_region (region_model *model)
2993{
2994 if (m_code_rid.null_p ())
2995 m_code_rid
2996 = model->add_region (new code_region (model->get_root_rid ()));
2997 return m_code_rid;
2998}
2999
3000/* Return the globals region (which could be NULL). */
3001
3002globals_region *
3003root_region::get_globals_region (const region_model *model) const
3004{
3005 return model->get_region <globals_region> (m_globals_rid);
3006}
3007
3008/* Return the region_id of the heap region, creating it if it doesn't
3009 already exist. */
3010
3011region_id
3012root_region::ensure_heap_region (region_model *model)
3013{
3014 if (m_heap_rid.null_p ())
3015 {
3016 svalue_id uninit_sid
3017 = model->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT,
3018 NULL_TREE));
3019 m_heap_rid
3020 = model->add_region (new heap_region (model->get_root_rid (),
3021 uninit_sid));
3022 }
3023 return m_heap_rid;
3024}
3025
3026/* Return the heap region (which could be NULL). */
3027
3028heap_region *
3029root_region::get_heap_region (const region_model *model) const
3030{
3031 return model->get_region <heap_region> (m_heap_rid);
3032}
3033
3034/* Implementation of region::remap_region_ids vfunc for root_region. */
3035
3036void
3037root_region::remap_region_ids (const region_id_map &map)
3038{
3039 map.update (&m_stack_rid);
3040 map.update (&m_globals_rid);
3041 map.update (&m_code_rid);
3042 map.update (&m_heap_rid);
3043}
3044
3045/* Attempt to merge ROOT_REGION_A and ROOT_REGION_B into
3046 MERGED_ROOT_REGION using MERGER.
3047 Return true if the merger is possible, false otherwise. */
3048
3049bool
3050root_region::can_merge_p (const root_region *root_region_a,
3051 const root_region *root_region_b,
3052 root_region *merged_root_region,
3053 model_merger *merger)
3054{
3055 /* We can only merge if the stacks are sufficiently similar. */
3056 stack_region *stack_a = root_region_a->get_stack_region (merger->m_model_a);
3057 stack_region *stack_b = root_region_b->get_stack_region (merger->m_model_b);
3058 if (stack_a && stack_b)
3059 {
3060 /* If the two models both have a stack, attempt to merge them. */
3061 merged_root_region->ensure_stack_region (merger->m_merged_model);
3062 if (!stack_region::can_merge_p (stack_a, stack_b, merger))
3063 return false;
3064 }
3065 else if (stack_a || stack_b)
3066 /* Don't attempt to merge if one model has a stack and the other
3067 doesn't. */
3068 return false;
3069
3070 map_region *globals_a = root_region_a->get_globals_region (merger->m_model_a);
3071 map_region *globals_b = root_region_b->get_globals_region (merger->m_model_b);
3072 if (globals_a && globals_b)
3073 {
3074 /* If both models have globals regions, attempt to merge them. */
3075 region_id merged_globals_rid
3076 = merged_root_region->ensure_globals_region (merger->m_merged_model);
3077 map_region *merged_globals
3078 = merged_root_region->get_globals_region (merger->m_merged_model);
3079 if (!map_region::can_merge_p (globals_a, globals_b,
3080 merged_globals, merged_globals_rid,
3081 merger))
3082 return false;
3083 }
3084 /* otherwise, merge as "no globals". */
3085
3086 map_region *code_a = root_region_a->get_code_region (merger->m_model_a);
3087 map_region *code_b = root_region_b->get_code_region (merger->m_model_b);
3088 if (code_a && code_b)
3089 {
3090 /* If both models have code regions, attempt to merge them. */
3091 region_id merged_code_rid
3092 = merged_root_region->ensure_code_region (merger->m_merged_model);
3093 map_region *merged_code
3094 = merged_root_region->get_code_region (merger->m_merged_model);
3095 if (!map_region::can_merge_p (code_a, code_b,
3096 merged_code, merged_code_rid,
3097 merger))
3098 return false;
3099 }
3100 /* otherwise, merge as "no code". */
3101
3102 heap_region *heap_a = root_region_a->get_heap_region (merger->m_model_a);
3103 heap_region *heap_b = root_region_b->get_heap_region (merger->m_model_b);
3104 if (heap_a && heap_b)
3105 {
3106 /* If both have a heap, create a "merged" heap.
3107 Actually merging the heap contents happens via the region_svalue
3108 instances, as needed, when seeing pairs of region_svalue instances. */
3109 merged_root_region->ensure_heap_region (merger->m_merged_model);
3110 }
3111 /* otherwise, merge as "no heap". */
3112
3113 return true;
3114}
3115
3116/* Implementation of region::add_to_hash vfunc for root_region. */
3117
3118void
3119root_region::add_to_hash (inchash::hash &hstate) const
3120{
3121 region::add_to_hash (hstate);
3122 inchash::add (m_stack_rid, hstate);
3123 inchash::add (m_globals_rid, hstate);
3124 inchash::add (m_code_rid, hstate);
3125 inchash::add (m_heap_rid, hstate);
3126}
3127
3128/* Implementation of region::walk_for_canonicalization vfunc for
3129 root_region. */
3130
3131void
3132root_region::walk_for_canonicalization (canonicalization *c) const
3133{
3134 c->walk_rid (m_stack_rid);
3135 c->walk_rid (m_globals_rid);
3136 c->walk_rid (m_code_rid);
3137 c->walk_rid (m_heap_rid);
3138}
3139
3140/* For debugging purposes: look for a descendant region for a local
3141 or global decl named IDENTIFIER (or an SSA_NAME for such a decl),
3142 returning its value, or svalue_id::null if none are found. */
3143
3144svalue_id
3145root_region::get_value_by_name (tree identifier,
3146 const region_model &model) const
3147{
3148 if (stack_region *stack = get_stack_region (&model))
3149 {
3150 svalue_id sid = stack->get_value_by_name (identifier, model);
3151 if (!sid.null_p ())
3152 return sid;
3153 }
3154 if (map_region *globals = get_globals_region (&model))
3155 {
3156 svalue_id sid = globals->get_value_by_name (identifier, model);
3157 if (!sid.null_p ())
3158 return sid;
3159 }
3160 return svalue_id::null ();
3161}
3162
3163/* class symbolic_region : public map_region. */
3164
3165/* symbolic_region's copy ctor. */
3166
3167symbolic_region::symbolic_region (const symbolic_region &other)
3168: region (other),
3169 m_possibly_null (other.m_possibly_null)
3170{
3171}
3172
3173/* Compare the fields of this symbolic_region with OTHER, returning true
3174 if they are equal.
3175 For use by region::operator==. */
3176
3177bool
3178symbolic_region::compare_fields (const symbolic_region &other) const
3179{
3180 return m_possibly_null == other.m_possibly_null;
3181}
3182
3183/* Implementation of region::clone vfunc for symbolic_region. */
3184
3185region *
3186symbolic_region::clone () const
3187{
3188 return new symbolic_region (*this);
3189}
3190
3191/* Implementation of region::walk_for_canonicalization vfunc for
3192 symbolic_region. */
3193
3194void
3195symbolic_region::walk_for_canonicalization (canonicalization *) const
3196{
3197 /* Empty. */
3198}
3199
3200/* class region_model. */
3201
3202/* region_model's default ctor. */
3203
3204region_model::region_model ()
3205{
3206 m_root_rid = add_region (new root_region ());
3207 m_constraints = new impl_constraint_manager (this);
3208 // TODO
3209}
3210
3211/* region_model's copy ctor. */
3212
3213region_model::region_model (const region_model &other)
3214: m_svalues (other.m_svalues.length ()),
3215 m_regions (other.m_regions.length ()),
3216 m_root_rid (other.m_root_rid)
3217{
3218 /* Clone the svalues and regions. */
3219 int i;
3220
3221 svalue *svalue;
3222 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3223 m_svalues.quick_push (svalue->clone ());
3224
3225 region *region;
3226 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3227 m_regions.quick_push (region->clone ());
3228
3229 m_constraints = other.m_constraints->clone (this);
3230}
3231
3232/* region_model's dtor. */
3233
3234region_model::~region_model ()
3235{
3236 delete m_constraints;
3237}
3238
3239/* region_model's assignment operator. */
3240
3241region_model &
3242region_model::operator= (const region_model &other)
3243{
3244 unsigned i;
3245 svalue *svalue;
3246 region *region;
3247
3248 /* Delete existing content. */
3249 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3250 delete svalue;
3251 m_svalues.truncate (0);
3252
3253 FOR_EACH_VEC_ELT (m_regions, i, region)
3254 delete region;
3255 m_regions.truncate (0);
3256
3257 delete m_constraints;
3258
3259 /* Clone the svalues and regions. */
3260 m_svalues.reserve (other.m_svalues.length (), true);
3261 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3262 m_svalues.quick_push (svalue->clone ());
3263
3264 m_regions.reserve (other.m_regions.length (), true);
3265 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3266 m_regions.quick_push (region->clone ());
3267
3268 m_root_rid = other.m_root_rid;
3269
3270 m_constraints = other.m_constraints->clone (this);
3271
3272 return *this;
3273}
3274
3275/* Equality operator for region_model.
3276
3277 Amongst other things this directly compares the svalue and region
3278 vectors and so for this to be meaningful both this and OTHER should
3279 have been canonicalized. */
3280
3281bool
3282region_model::operator== (const region_model &other) const
3283{
3284 if (m_root_rid != other.m_root_rid)
3285 return false;
3286
3287 if (m_svalues.length () != other.m_svalues.length ())
3288 return false;
3289
3290 if (m_regions.length () != other.m_regions.length ())
3291 return false;
3292
3293 if (*m_constraints != *other.m_constraints)
3294 return false;
3295
3296 unsigned i;
3297 svalue *svalue;
3298 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3299 if (!(*m_svalues[i] == *other.m_svalues[i]))
3300 return false;
3301
3302 region *region;
3303 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3304 if (!(*m_regions[i] == *other.m_regions[i]))
3305 return false;
3306
3307 gcc_checking_assert (hash () == other.hash ());
3308
3309 return true;
3310}
3311
3312/* Generate a hash value for this region_model. */
3313
3314hashval_t
3315region_model::hash () const
3316{
3317 hashval_t result = 0;
3318 int i;
3319
3320 svalue *svalue;
3321 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3322 result ^= svalue->hash ();
3323
3324 region *region;
3325 FOR_EACH_VEC_ELT (m_regions, i, region)
3326 result ^= region->hash ();
3327
3328 result ^= m_constraints->hash ();
3329
3330 return result;
3331}
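/* A minimal standalone sketch of the same XOR-combining scheme, assuming a
   plain C++ container rather than svalues and regions.  XOR is commutative,
   so the combined hash is independent of element order (convenient when the
   collection may not yet be canonicalized); the cost is that permutations of
   the same elements always collide.  The toy_* name is invented; nothing
   below is used by the analyzer.  */

#include <cstddef>
#include <functional>
#include <vector>

static std::size_t
toy_hash_collection (const std::vector<int> &elements)
{
  std::size_t result = 0;
  /* Combine per-element hashes with XOR, mirroring region_model::hash.  */
  for (int element : elements)
    result ^= std::hash<int> () (element);
  return result;
}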
3332
3333/* Print an all-on-one-line representation of this region_model to PP,
3334 which must support %E for trees. */
3335
3336void
3337region_model::print (pretty_printer *pp) const
3338{
3339 int i;
3340
3341 pp_string (pp, "svalues: [");
3342 svalue *svalue;
3343 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3344 {
3345 if (i > 0)
3346 pp_string (pp, ", ");
3347 print_svalue (svalue_id::from_int (i), pp);
3348 }
3349
3350 pp_string (pp, "], regions: [");
3351
3352 region *region;
3353 FOR_EACH_VEC_ELT (m_regions, i, region)
3354 {
3355 if (i > 0)
3356 pp_string (pp, ", ");
3357 region->print (*this, region_id::from_int (i), pp);
3358 }
3359
3360 pp_string (pp, "], constraints: ");
3361
3362 m_constraints->print (pp);
3363}
3364
3365/* Print the svalue with id SID to PP. */
3366
3367void
3368region_model::print_svalue (svalue_id sid, pretty_printer *pp) const
3369{
3370 get_svalue (sid)->print (*this, sid, pp);
3371}
3372
3373/* Dump a .dot representation of this region_model to PP, showing
3374 the values and the hierarchy of regions. */
3375
3376void
3377region_model::dump_dot_to_pp (pretty_printer *pp) const
3378{
3379 graphviz_out gv (pp);
3380
3381 pp_string (pp, "digraph \"");
3382 pp_write_text_to_stream (pp);
3383 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
3384 pp_string (pp, "\" {\n");
3385
3386 gv.indent ();
3387
3388 pp_string (pp, "overlap=false;\n");
3389 pp_string (pp, "compound=true;\n");
3390
3391 int i;
3392
3393 svalue *svalue;
3394 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3395 svalue->dump_dot_to_pp (*this, svalue_id::from_int (i), pp);
3396
3397 region *region;
3398 FOR_EACH_VEC_ELT (m_regions, i, region)
3399 region->dump_dot_to_pp (*this, region_id::from_int (i), pp);
3400
3401 /* TODO: constraints. */
3402
3403 /* Terminate "digraph" */
3404 gv.outdent ();
3405 pp_string (pp, "}");
3406 pp_newline (pp);
3407}
3408
3409/* Dump a .dot representation of this region_model to FP. */
3410
3411void
3412region_model::dump_dot_to_file (FILE *fp) const
3413{
3414 pretty_printer pp;
3415 pp_format_decoder (&pp) = default_tree_printer;
3416 pp.buffer->stream = fp;
3417 dump_dot_to_pp (&pp);
3418 pp_flush (&pp);
3419}
3420
3421/* Dump a .dot representation of this region_model to PATH. */
3422
3423void
3424region_model::dump_dot (const char *path) const
3425{
3426 FILE *fp = fopen (path, "w");
3427 dump_dot_to_file (fp);
3428 fclose (fp);
3429}
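/* A minimal standalone sketch of the kind of GraphViz output the two dump_dot
   functions above produce, assuming only the C++ standard library.  The node
   names and labels are invented; nothing below is used by the analyzer.  The
   resulting file can be rendered with e.g. "dot -Tsvg".  */

#include <fstream>

static void
toy_dump_dot (const char *path)
{
  std::ofstream out (path);
  /* Emit a two-node digraph with the same preamble used above.  */
  out << "digraph \"toy_model\" {\n";
  out << "  overlap=false;\n";
  out << "  compound=true;\n";
  out << "  region_0 [label=\"root\"];\n";
  out << "  region_1 [label=\"stack\"];\n";
  out << "  region_0 -> region_1;\n";
  out << "}\n";
}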
3430
3431/* Dump a multiline representation of this model to PP, showing the
3432 region hierarchy, the svalues, and any constraints.
3433
3434 If SUMMARIZE is true, show only the most pertinent information,
3435 in a form that attempts to be less verbose.
3436 Otherwise, show all information. */
3437
3438void
3439region_model::dump_to_pp (pretty_printer *pp, bool summarize) const
3440{
3441 if (summarize)
3442 {
3443 bool is_first = true;
3444 region_id frame_id = get_current_frame_id ();
3445 frame_region *frame = get_region <frame_region> (frame_id);
3446 if (frame)
3447 dump_summary_of_map (pp, frame, &is_first);
3448
3449 region_id globals_id = get_globals_region_id ();
3450 map_region *globals = get_region <map_region> (globals_id);
3451 if (globals)
3452 dump_summary_of_map (pp, globals, &is_first);
3453
3454 unsigned i;
3455
3456 equiv_class *ec;
3457 FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec)
3458 {
3459 for (unsigned j = 0; j < ec->m_vars.length (); j++)
3460 {
3461 svalue_id lhs_sid = ec->m_vars[j];
3462 tree lhs_tree = get_representative_tree (lhs_sid);
3463 if (lhs_tree == NULL_TREE)
3464 continue;
3465 for (unsigned k = j + 1; k < ec->m_vars.length (); k++)
3466 {
3467 svalue_id rhs_sid = ec->m_vars[k];
3468 tree rhs_tree = get_representative_tree (rhs_sid);
3469 if (rhs_tree
3470 && !(CONSTANT_CLASS_P (lhs_tree)
3471 && CONSTANT_CLASS_P (rhs_tree)))
3472 {
3473 dump_separator (pp, &is_first);
3474 dump_tree (pp, lhs_tree);
3475 pp_string (pp, " == ");
3476 dump_tree (pp, rhs_tree);
3477 }
3478 }
3479 }
3480 }
3481
3482 constraint *c;
3483 FOR_EACH_VEC_ELT (m_constraints->m_constraints, i, c)
3484 {
3485 const equiv_class &lhs = c->m_lhs.get_obj (*m_constraints);
3486 const equiv_class &rhs = c->m_rhs.get_obj (*m_constraints);
3487 svalue_id lhs_sid = lhs.get_representative ();
3488 svalue_id rhs_sid = rhs.get_representative ();
3489 tree lhs_tree = get_representative_tree (lhs_sid);
3490 tree rhs_tree = get_representative_tree (rhs_sid);
3491 if (lhs_tree && rhs_tree
3492 && !(CONSTANT_CLASS_P (lhs_tree) && CONSTANT_CLASS_P (rhs_tree)))
3493 {
3494 dump_separator (pp, &is_first);
3495 dump_tree (pp, lhs_tree);
3496 pp_printf (pp, " %s ", constraint_op_code (c->m_op));
3497 dump_tree (pp, rhs_tree);
3498 }
3499 }
3500
3501 return;
3502 }
3503
3504 get_region (m_root_rid)->dump_to_pp (*this, m_root_rid, pp, "", true);
3505
3506 pp_string (pp, "svalues:");
3507 pp_newline (pp);
3508 int i;
3509 svalue *svalue;
3510 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3511 {
3512 pp_string (pp, " ");
3513 svalue_id sid = svalue_id::from_int (i);
3514 print_svalue (sid, pp);
3515 pp_newline (pp);
3516 }
3517
3518 pp_string (pp, "constraint manager:");
3519 pp_newline (pp);
3520 m_constraints->dump_to_pp (pp);
3521}
3522
3523/* Dump a multiline representation of this model to FILE. */
3524
3525void
3526region_model::dump (FILE *fp, bool summarize) const
3527{
3528 pretty_printer pp;
3529 pp_format_decoder (&pp) = default_tree_printer;
3530 pp_show_color (&pp) = pp_show_color (global_dc->printer);
3531 pp.buffer->stream = fp;
3532 dump_to_pp (&pp, summarize);
3533 pp_flush (&pp);
3534}
3535
3536/* Dump a multiline representation of this model to stderr. */
3537
3538DEBUG_FUNCTION void
3539region_model::dump (bool summarize) const
3540{
3541 dump (stderr, summarize);
3542}
3543
3544/* Dump this region_model fully to stderr (i.e. without summarization). */
3545
3546DEBUG_FUNCTION void
3547region_model::debug () const
3548{
3549 dump (false);
3550}
3551
3552/* Dump VEC to PP, in the form "{VEC elements}: LABEL". */
3553
3554static void
3555dump_vec_of_tree (pretty_printer *pp,
3556 bool *is_first,
3557 const auto_vec<tree> &vec,
3558 const char *label)
3559{
3560 if (vec.length () == 0)
3561 return;
3562
3563 dump_separator (pp, is_first);
3564 pp_printf (pp, "{");
3565 unsigned i;
3566 tree key;
3567 FOR_EACH_VEC_ELT (vec, i, key)
3568 {
3569 if (i > 0)
3570 pp_string (pp, ", ");
3571 dump_tree (pp, key);
3572 }
3573 pp_printf (pp, "}: %s", label);
3574}
3575
3576/* Dump *MAP_REGION to PP in compact form, updating *IS_FIRST.
3577 Subroutine of region_model::dump_to_pp for use on stack frames and for
3578 the "globals" region. */
3579
3580void
3581region_model::dump_summary_of_map (pretty_printer *pp,
3582 map_region *map_region,
3583 bool *is_first) const
3584{
3585 /* Get the keys, sorted by tree_cmp. In particular, this ought
3586 to alphabetize any decls. */
3587 auto_vec<tree> keys (map_region->elements ());
3588 for (map_region::iterator_t iter = map_region->begin ();
3589 iter != map_region->end ();
3590 ++iter)
3591 {
3592 tree key_a = (*iter).first;
3593 keys.quick_push (key_a);
3594 }
3595 keys.qsort (tree_cmp);
3596
3597 /* Print pointers, constants, and poisoned values that aren't "uninit";
3598 gather keys for unknown and uninit values. */
3599 unsigned i;
3600 tree key;
3601 auto_vec<tree> unknown_keys;
3602 auto_vec<tree> uninit_keys;
3603 FOR_EACH_VEC_ELT (keys, i, key)
3604 {
3605 region_id child_rid = *map_region->get (key);
3606
3607 region *child_region = get_region (child_rid);
3608 if (!child_region)
3609 continue;
3610 svalue_id sid = child_region->get_value_direct ();
3611 if (sid.null_p ())
3612 continue;
3613 svalue *sval = get_svalue (sid);
3614 switch (sval->get_kind ())
3615 {
3616 default:
3617 gcc_unreachable ();
3618 case SK_REGION:
3619 {
3620 region_svalue *region_sval = as_a <region_svalue *> (sval);
3621 region_id pointee_rid = region_sval->get_pointee ();
3622 tree pointee = get_representative_path_var (pointee_rid).m_tree;
3623 dump_separator (pp, is_first);
3624 dump_tree (pp, key);
3625 pp_string (pp, ": ");
3626 if (pointee)
3627 {
3628 pp_character (pp, '&');
3629 dump_tree (pp, pointee);
3630 }
3631 else
3632 pp_string (pp, "NULL");
3633 }
3634 break;
3635 case SK_CONSTANT:
3636 dump_separator (pp, is_first);
3637 dump_tree (pp, key);
3638 pp_string (pp, ": ");
3639 dump_tree (pp, sval->dyn_cast_constant_svalue ()->get_constant ());
3640 break;
3641 case SK_UNKNOWN:
3642 unknown_keys.safe_push (key);
3643 break;
3644 case SK_POISONED:
3645 {
3646 poisoned_svalue *poisoned_sval = as_a <poisoned_svalue *> (sval);
3647 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3648 if (pkind == POISON_KIND_UNINIT)
3649 uninit_keys.safe_push (key);
3650 else
3651 {
3652 dump_separator (pp, is_first);
3653 dump_tree (pp, key);
3654 pp_printf (pp, ": %s", poison_kind_to_str (pkind));
3655 }
3656 }
3657 break;
3658 case SK_SETJMP:
3659 dump_separator (pp, is_first);
3660 pp_printf (pp, "setjmp: EN: %i",
3661 sval->dyn_cast_setjmp_svalue ()->get_index ());
3662 break;
3663 }
3664 }
3665
3666 /* Print unknown and uninitialized values in consolidated form. */
3667 dump_vec_of_tree (pp, is_first, unknown_keys, "unknown");
3668 dump_vec_of_tree (pp, is_first, uninit_keys, "uninit");
3669}
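/* A minimal standalone sketch of the consolidation idea used by
   dump_summary_of_map, assuming plain C++ maps of strings instead of trees
   and svalues: bindings with an interesting value are printed one per line,
   while bindings that are merely "unknown" are gathered and printed once as
   a set, keeping the summary short.  The toy_* name is invented; nothing
   below is used by the analyzer.  */

#include <cstdio>
#include <map>
#include <string>
#include <vector>

static void
toy_dump_summary (const std::map<std::string, std::string> &bindings)
{
  std::vector<std::string> unknown_keys;
  for (const auto &kv : bindings)
    {
      if (kv.second == "unknown")
        unknown_keys.push_back (kv.first);
      else
        std::printf ("%s: %s\n", kv.first.c_str (), kv.second.c_str ());
    }
  if (unknown_keys.empty ())
    return;
  /* Print the consolidated keys in the form "{a, b, c}: unknown".  */
  std::printf ("{");
  for (std::size_t i = 0; i < unknown_keys.size (); i++)
    std::printf ("%s%s", i ? ", " : "", unknown_keys[i].c_str ());
  std::printf ("}: unknown\n");
}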
3670
3671/* Assert that this object is valid. */
3672
3673void
3674region_model::validate () const
3675{
3676 /* Skip this in a release build. */
3677#if !CHECKING_P
3678 return;
3679#endif
3680
3681 m_constraints->validate ();
3682
3683 unsigned i;
3684 region *r;
3685 FOR_EACH_VEC_ELT (m_regions, i, r)
3686 r->validate (this);
3687
3688 // TODO: anything else?
3689
3690 /* Verify that the stack region (if any) has an "uninitialized" value. */
3691 region *stack_region = get_root_region ()->get_stack_region (this);
3692 if (stack_region)
3693 {
3694 svalue_id stack_value_sid = stack_region->get_value_direct ();
3695 svalue *stack_value = get_svalue (stack_value_sid);
3696 gcc_assert (stack_value->get_kind () == SK_POISONED);
3697 poisoned_svalue *subclass = stack_value->dyn_cast_poisoned_svalue ();
3698 gcc_assert (subclass);
3699 gcc_assert (subclass->get_poison_kind () == POISON_KIND_UNINIT);
3700 }
3701}
3702
3703/* Global data for use by svalue_id_cmp_by_constant_svalue. */
3704
3705static region_model *svalue_id_cmp_by_constant_svalue_model = NULL;
3706
3707/* Comparator for use by region_model::canonicalize. */
3708
3709static int
3710svalue_id_cmp_by_constant_svalue (const void *p1, const void *p2)
3711{
3712 const svalue_id *sid1 = (const svalue_id *)p1;
3713 const svalue_id *sid2 = (const svalue_id *)p2;
3714 gcc_assert (!sid1->null_p ());
3715 gcc_assert (!sid2->null_p ());
3716 gcc_assert (svalue_id_cmp_by_constant_svalue_model);
3717 const svalue &sval1
3718 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid1);
3719 const svalue &sval2
3720 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid2);
3721 gcc_assert (sval1.get_kind () == SK_CONSTANT);
3722 gcc_assert (sval2.get_kind () == SK_CONSTANT);
3723
3724 tree cst1 = ((const constant_svalue &)sval1).get_constant ();
3725 tree cst2 = ((const constant_svalue &)sval2).get_constant ();
3726 return tree_cmp (cst1, cst2);
3727}
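/* A minimal standalone sketch of the "stash the context in a file-scope
   pointer" pattern used above, which is needed because a qsort comparator
   receives no user-data argument.  The second half shows that, outside the
   vec/qsort API, a capturing lambda with std::sort avoids the global
   entirely.  All toy_* names are invented; nothing below is used by the
   analyzer.  */

#include <algorithm>
#include <cstdlib>
#include <vector>

struct toy_model
{
  std::vector<int> values;
};

static toy_model *toy_cmp_model;

static int
toy_cmp_by_value (const void *p1, const void *p2)
{
  /* The comparator has no context argument, so read via the global.  */
  int v1 = toy_cmp_model->values[*(const int *) p1];
  int v2 = toy_cmp_model->values[*(const int *) p2];
  return (v1 > v2) - (v1 < v2);
}

static void
toy_sort_indices (toy_model &model, std::vector<int> &indices)
{
  /* qsort style: set the global, sort, then clear it again.  */
  toy_cmp_model = &model;
  std::qsort (indices.data (), indices.size (), sizeof (int),
              toy_cmp_by_value);
  toy_cmp_model = nullptr;

  /* std::sort style: a capturing lambda makes the global unnecessary.  */
  std::sort (indices.begin (), indices.end (),
             [&model] (int a, int b)
             { return model.values[a] < model.values[b]; });
}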
3728
3729/* Reorder the regions and svalues into a deterministic "canonical" order,
3730 to maximize the chance of equality.
3731 If non-NULL, notify CTXT about the svalue id remapping. */
3732
3733void
3734region_model::canonicalize (region_model_context *ctxt)
3735{
3736 /* Walk all regions and values in a deterministic order, visiting
3737 rids and sids, generating a rid and sid map. */
3738 canonicalization c (*this);
3739
3740 /* (1): Walk all svalues, putting constants first, sorting the constants
3741 (thus imposing an ordering on any constants that are purely referenced
3742 by constraints).
3743 Ignore other svalues for now. */
3744 {
3745 unsigned i;
3746 auto_vec<svalue_id> sids;
3747 svalue *sval;
3748 FOR_EACH_VEC_ELT (m_svalues, i, sval)
3749 {
3750 if (sval->get_kind () == SK_CONSTANT)
3751 sids.safe_push (svalue_id::from_int (i));
3752 }
3753 svalue_id_cmp_by_constant_svalue_model = this;
3754 sids.qsort (svalue_id_cmp_by_constant_svalue);
3755 svalue_id_cmp_by_constant_svalue_model = NULL;
3756 svalue_id *sid;
3757 FOR_EACH_VEC_ELT (sids, i, sid)
3758 c.walk_sid (*sid);
3759 }
3760
3761 /* (2): Walk all regions (and thus their values) in a deterministic
3762 order. */
3763 c.walk_rid (m_root_rid);
3764
3765 /* (3): Ensure we've visited everything, as we don't want to purge
3766 at this stage. Anything we visit for the first time here has
3767 arbitrary order. */
3768 {
3769 unsigned i;
3770 region *region;
3771 FOR_EACH_VEC_ELT (m_regions, i, region)
3772 c.walk_rid (region_id::from_int (i));
3773 svalue *sval;
3774 FOR_EACH_VEC_ELT (m_svalues, i, sval)
3775 c.walk_sid (svalue_id::from_int (i));
3776 }
3777
3778 /* (4): We now have a reordering of the regions and values.
3779 Apply it. */
3780 remap_svalue_ids (c.m_sid_map);
3781 remap_region_ids (c.m_rid_map);
3782 if (ctxt)
3783 ctxt->remap_svalue_ids (c.m_sid_map);
3784
3785 /* (5): Canonicalize the constraint_manager (it has already had its
3786 svalue_ids remapped above). This makes use of the new svalue_id
3787 values, and so must happen last. */
3788 m_constraints->canonicalize (get_num_svalues ());
3789
3790 validate ();
3791}
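/* A minimal standalone sketch of the renumbering step at the heart of
   canonicalization, assuming plain C++ containers: elements are assigned new
   indices in the order of a deterministic visit, and the permutation is then
   applied, so that two logically-identical collections built in different
   orders become bit-identical.  VISIT_ORDER is assumed to contain every
   index exactly once.  The toy_* name is invented; nothing below is used by
   the analyzer.  */

#include <cstddef>
#include <string>
#include <vector>

static std::vector<std::string>
toy_canonicalize (const std::vector<std::string> &elements,
                  const std::vector<int> &visit_order)
{
  /* Old index -> new index, assigned in visit order.  */
  std::vector<int> new_index (elements.size ());
  for (std::size_t i = 0; i < visit_order.size (); i++)
    new_index[visit_order[i]] = static_cast<int> (i);

  /* Apply the permutation.  */
  std::vector<std::string> result (elements.size ());
  for (std::size_t old_idx = 0; old_idx < elements.size (); old_idx++)
    result[new_index[old_idx]] = elements[old_idx];
  return result;
}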
3792
3793/* Return true if this region_model is in canonical form. */
3794
3795bool
3796region_model::canonicalized_p () const
3797{
3798 region_model copy (*this);
3799 copy.canonicalize (NULL);
3800 return *this == copy;
3801}
3802
3803/* A subclass of pending_diagnostic for complaining about uses of
3804 poisoned values. */
3805
3806class poisoned_value_diagnostic
3807: public pending_diagnostic_subclass<poisoned_value_diagnostic>
3808{
3809public:
3810 poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
3811 : m_expr (expr), m_pkind (pkind)
3812 {}
3813
3814 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }
3815
3816 bool operator== (const poisoned_value_diagnostic &other) const
3817 {
3818 return m_expr == other.m_expr;
3819 }
3820
3821 bool emit (rich_location *rich_loc) FINAL OVERRIDE
3822 {
3823 switch (m_pkind)
3824 {
3825 default:
3826 gcc_unreachable ();
3827 case POISON_KIND_UNINIT:
3828 {
3829 diagnostic_metadata m;
3830 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
3831 return warning_at (rich_loc, m,
3832 OPT_Wanalyzer_use_of_uninitialized_value,
3833 "use of uninitialized value %qE",
3834 m_expr);
3835 }
3836 break;
3837 case POISON_KIND_FREED:
3838 {
3839 diagnostic_metadata m;
3840 m.add_cwe (416); /* "CWE-416: Use After Free". */
3841 return warning_at (rich_loc, m,
3842 OPT_Wanalyzer_use_after_free,
3843 "use after %<free%> of %qE",
3844 m_expr);
3845 }
3846 break;
3847 case POISON_KIND_POPPED_STACK:
3848 {
3849 diagnostic_metadata m;
3850 /* TODO: which CWE? */
3851 return warning_at (rich_loc, m,
3852 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
3853 "use of pointer %qE within stale stack frame",
3854 m_expr);
3855 }
3856 break;
3857 }
3858 }
3859
3860 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
3861 {
3862 switch (m_pkind)
3863 {
3864 default:
3865 gcc_unreachable ();
3866 case POISON_KIND_UNINIT:
3867 return ev.formatted_print ("use of uninitialized value %qE here",
3868 m_expr);
3869 case POISON_KIND_FREED:
3870 return ev.formatted_print ("use after %<free%> of %qE here",
3871 m_expr);
3872 case POISON_KIND_POPPED_STACK:
3873 return ev.formatted_print
3874 ("use of pointer %qE within stale stack frame here",
3875 m_expr);
3876 }
3877 }
3878
3879private:
3880 tree m_expr;
3881 enum poison_kind m_pkind;
3882};
3883
3884/* Determine if EXPR is poisoned, and if so, queue a diagnostic to CTXT. */
3885
3886void
3887region_model::check_for_poison (tree expr, region_model_context *ctxt)
3888{
3889 if (!ctxt)
3890 return;
3891
3892 // TODO: this is disabled for now (too many false positives)
3893 return;
3894
3895 svalue_id expr_sid = get_rvalue (expr, ctxt);
3896 gcc_assert (!expr_sid.null_p ());
3897 svalue *expr_svalue = get_svalue (expr_sid);
3898 gcc_assert (expr_svalue);
3899 if (const poisoned_svalue *poisoned_sval
3900 = expr_svalue->dyn_cast_poisoned_svalue ())
3901 {
3902 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3903 ctxt->warn (new poisoned_value_diagnostic (expr, pkind));
3904 }
3905}
3906
3907/* Update this model for the ASSIGN stmt, using CTXT to report any
3908 diagnostics. */
3909
3910void
3911region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
3912{
3913 tree lhs = gimple_assign_lhs (assign);
3914 tree rhs1 = gimple_assign_rhs1 (assign);
3915
3916 region_id lhs_rid = get_lvalue (lhs, ctxt);
3917
3918 /* Check for uses of poisoned values. */
3919 switch (get_gimple_rhs_class (gimple_expr_code (assign)))
3920 {
3921 case GIMPLE_INVALID_RHS:
3922 gcc_unreachable ();
3923 break;
3924 case GIMPLE_TERNARY_RHS:
3925 check_for_poison (gimple_assign_rhs3 (assign), ctxt);
3926 /* Fallthru */
3927 case GIMPLE_BINARY_RHS:
3928 check_for_poison (gimple_assign_rhs2 (assign), ctxt);
3929 /* Fallthru */
3930 case GIMPLE_UNARY_RHS:
3931 case GIMPLE_SINGLE_RHS:
3932 check_for_poison (gimple_assign_rhs1 (assign), ctxt);
3933 }
3934
3935 if (lhs_rid.null_p ())
3936 return;
3937 // TODO: issue a warning for this case
3938
3939 enum tree_code op = gimple_assign_rhs_code (assign);
3940 switch (op)
3941 {
3942 default:
3943 {
3944 if (0)
3945 sorry_at (assign->location, "unhandled assignment op: %qs",
3946 get_tree_code_name (op));
3947 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
3948 }
3949 break;
3950
3951 case BIT_FIELD_REF:
3952 {
3953 // TODO
3954 }
3955 break;
3956
3957 case CONSTRUCTOR:
3958 {
3959 /* e.g. "x ={v} {CLOBBER};" */
3960 // TODO
3961 }
3962 break;
3963
3964 case POINTER_PLUS_EXPR:
3965 {
3966 /* e.g. "_1 = a_10(D) + 12;" */
3967 tree ptr = rhs1;
3968 tree offset = gimple_assign_rhs2 (assign);
3969
3970 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
3971 svalue_id offset_sid = get_rvalue (offset, ctxt);
3972 region_id element_rid
3973 = get_or_create_pointer_plus_expr (TREE_TYPE (TREE_TYPE (ptr)),
3974 ptr_sid, offset_sid,
3975 ctxt);
3976 svalue_id element_ptr_sid
3977 = get_or_create_ptr_svalue (TREE_TYPE (ptr), element_rid);
3978 set_value (lhs_rid, element_ptr_sid, ctxt);
3979 }
3980 break;
3981
3982 case POINTER_DIFF_EXPR:
3983 {
3984 /* e.g. "_1 = p_2(D) - q_3(D);". */
3985
3986 /* TODO. */
3987
3988 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
3989 }
3990 break;
3991
3992 case ADDR_EXPR:
3993 {
3994 /* LHS = &RHS; */
3995 svalue_id ptr_sid = get_rvalue (rhs1, ctxt);
3996 set_value (lhs_rid, ptr_sid, ctxt);
3997 }
3998 break;
3999
4000 case MEM_REF:
4001 {
4002 region_id rhs_rid = get_lvalue (rhs1, ctxt);
4003 svalue_id rhs_sid
4004 = get_region (rhs_rid)->get_value (*this, true, ctxt);
4005 set_value (lhs_rid, rhs_sid, ctxt);
4006 }
4007 break;
4008
4009 case REAL_CST:
4010 case INTEGER_CST:
4011 case ARRAY_REF:
4012 {
4013 /* LHS = RHS; */
4014 svalue_id cst_sid = get_rvalue (rhs1, ctxt);
4015 set_value (lhs_rid, cst_sid, ctxt);
4016 }
4017 break;
4018
4019 case FIX_TRUNC_EXPR:
4020 case FLOAT_EXPR:
4021 case NOP_EXPR:
4022 // cast: TODO
4023 // fall through for now
4024 case SSA_NAME:
4025 case VAR_DECL:
4026 case PARM_DECL:
4027 {
4028 /* LHS = VAR; */
4029 svalue_id var_sid = get_rvalue (rhs1, ctxt);
4030 set_value (lhs_rid, var_sid, ctxt);
4031 }
4032 break;
4033
4034 case EQ_EXPR:
4035 case GE_EXPR:
4036 case LE_EXPR:
4037 case NE_EXPR:
4038 case GT_EXPR:
4039 case LT_EXPR:
4040 {
4041 tree rhs2 = gimple_assign_rhs2 (assign);
4042
4043 // TODO: constraints between svalues
4044 svalue_id rhs1_sid = get_rvalue (rhs1, ctxt);
4045 svalue_id rhs2_sid = get_rvalue (rhs2, ctxt);
4046
4047 tristate t = eval_condition (rhs1_sid, op, rhs2_sid);
4048 if (t.is_known ())
4049 set_value (lhs_rid,
4050 get_rvalue (t.is_true ()
4051 ? boolean_true_node
4052 : boolean_false_node,
4053 ctxt),
4054 ctxt);
4055 else
4056 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
4057 }
4058 break;
4059
4060 case NEGATE_EXPR:
4061 case BIT_NOT_EXPR:
4062 {
4063 // TODO: unary ops
4064
4065 // TODO: constant?
4066
4067 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
4068 }
4069 break;
4070
4071 case PLUS_EXPR:
4072 case MINUS_EXPR:
4073 case MULT_EXPR:
4074 case TRUNC_DIV_EXPR:
4075 case TRUNC_MOD_EXPR:
4076 case LSHIFT_EXPR:
4077 case RSHIFT_EXPR:
4078 case BIT_IOR_EXPR:
4079 case BIT_XOR_EXPR:
4080 case BIT_AND_EXPR:
4081 case MIN_EXPR:
4082 case MAX_EXPR:
4083 {
4084 /* Binary ops. */
4085 tree rhs2 = gimple_assign_rhs2 (assign);
4086
4087 svalue_id rhs1_sid = get_rvalue (rhs1, ctxt);
4088 svalue_id rhs2_sid = get_rvalue (rhs2, ctxt);
4089
4090 if (tree rhs1_cst = maybe_get_constant (rhs1_sid))
4091 if (tree rhs2_cst = maybe_get_constant (rhs2_sid))
4092 {
4093 tree result = fold_build2 (op, TREE_TYPE (lhs),
4094 rhs1_cst, rhs2_cst);
4095 if (CONSTANT_CLASS_P (result))
4096 {
4097 svalue_id result_sid
4098 = get_or_create_constant_svalue (result);
4099 set_value (lhs_rid, result_sid, ctxt);
4100 return;
4101 }
4102 }
4103 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
4104 }
4105 break;
4106
4107 case COMPONENT_REF:
4108 {
4109 /* LHS = op0.op1; */
4110 region_id child_rid = get_lvalue (rhs1, ctxt);
4111 svalue_id child_sid
4112 = get_region (child_rid)->get_value (*this, true, ctxt);
4113 set_value (lhs_rid, child_sid, ctxt);
4114 }
4115 break;
4116 }
4117}
4118
4119/* Update this model for the CALL stmt, using CTXT to report any
4120 diagnostics - the first half.
4121
4122 Updates to the region_model that should be made *before* sm-states
4123 are updated are done here; other updates to the region_model are done
4124 in region_model::on_call_post.
4125
4126 Return true if the function call has unknown side effects (it wasn't
4127 recognized and we don't have a body for it, or are unable to tell which
4128 fndecl it is). */
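/* Illustrative (hypothetical) example: for a call such as
     extern void unknown_fn (int *);
     ...
     unknown_fn (&i);
   where unknown_fn has no GIMPLE body available, this returns true, and
   on_call_post will conservatively treat everything reachable via "&i"
   as clobbered (see handle_unrecognized_call).  */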
4129
4130bool
4131region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
4132{
4133 region_id lhs_rid;
4134 tree lhs_type = NULL_TREE;
4135 if (tree lhs = gimple_call_lhs (call))
4136 {
4137 lhs_rid = get_lvalue (lhs, ctxt);
4138 lhs_type = TREE_TYPE (lhs);
4139 }
4140
4141 /* Check for uses of poisoned values.
4142 For now, special-case "free", to avoid warning about "use-after-free"
4143 when "double free" would be more precise. */
4144 if (!is_special_named_call_p (call, "free", 1))
4145 for (unsigned i = 0; i < gimple_call_num_args (call); i++)
4146 check_for_poison (gimple_call_arg (call, i), ctxt);
4147
4148 bool unknown_side_effects = false;
4149
4150 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4151 {
4152 if (is_named_call_p (callee_fndecl, "malloc", call, 1))
4153 {
4154 // TODO: capture size as a svalue?
4155 region_id new_rid = add_new_malloc_region ();
4156 if (!lhs_rid.null_p ())
4157 {
4158 svalue_id ptr_sid
4159 = get_or_create_ptr_svalue (lhs_type, new_rid);
4160 set_value (lhs_rid, ptr_sid, ctxt);
4161 }
4162 return false;
4163 }
4164 else if (is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
4165 {
4166 region_id frame_rid = get_current_frame_id ();
4167 region_id new_rid
4168 = add_region (new symbolic_region (frame_rid, false));
4169 if (!lhs_rid.null_p ())
4170 {
4171 svalue_id ptr_sid
4172 = get_or_create_ptr_svalue (lhs_type, new_rid);
4173 set_value (lhs_rid, ptr_sid, ctxt);
4174 }
4175 return false;
4176 }
4177 else if (is_named_call_p (callee_fndecl, "strlen", call, 1))
4178 {
4179 region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt);
4180 svalue_id buf_sid
4181 = get_region (buf_rid)->get_value (*this, true, ctxt);
4182 if (tree cst_expr = maybe_get_constant (buf_sid))
4183 {
4184 if (TREE_CODE (cst_expr) == STRING_CST
4185 && !lhs_rid.null_p ())
4186 {
4187 /* TREE_STRING_LENGTH is sizeof, not strlen. */
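	      /* e.g. for the string constant "abc", TREE_STRING_LENGTH is 4
		 (including the terminating NUL), giving a strlen result of 3.  */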
4188 int sizeof_cst = TREE_STRING_LENGTH (cst_expr);
4189 int strlen_cst = sizeof_cst - 1;
4190 tree t_cst = build_int_cst (lhs_type, strlen_cst);
4191 svalue_id result_sid
4192 = get_or_create_constant_svalue (t_cst);
4193 set_value (lhs_rid, result_sid, ctxt);
4194 return false;
4195 }
4196 }
4197 /* Otherwise an unknown value. */
4198 }
4199 else if (is_named_call_p (callee_fndecl,
4200 "__analyzer_dump_num_heap_regions", call, 0))
4201 {
4202 /* Handle the builtin "__analyzer_dump_num_heap_regions" by emitting
4203 a warning (for use in DejaGnu tests). */
4204 int num_heap_regions = 0;
4205 region_id heap_rid = get_root_region ()->ensure_heap_region (this);
4206 unsigned i;
4207 region *region;
4208 FOR_EACH_VEC_ELT (m_regions, i, region)
4209 if (region->get_parent () == heap_rid)
4210 num_heap_regions++;
4211 /* Use quotes to ensure the output isn't truncated. */
4212 warning_at (call->location, 0,
4213 "num heap regions: %qi", num_heap_regions);
4214 return false;
4215 }
4216 else if (!fndecl_has_gimple_body_p (callee_fndecl)
4217 && !DECL_PURE_P (callee_fndecl))
4218 unknown_side_effects = true;
4219 }
4220 else
4221 unknown_side_effects = true;
4222
4223 /* Unknown return value. */
4224 if (!lhs_rid.null_p ())
4225 set_to_new_unknown_value (lhs_rid, lhs_type, ctxt);
4226
4227 return unknown_side_effects;
4228}
4229
4230/* Update this model for the CALL stmt, using CTXT to report any
4231 diagnostics - the second half.
4232
4233 Updates to the region_model that should be made *after* sm-states
4234 are updated are done here; other updates to the region_model are done
4235 in region_model::on_call_pre.
4236
4237 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
4238 to purge state. */
4239
4240void
4241region_model::on_call_post (const gcall *call,
4242 bool unknown_side_effects,
4243 region_model_context *ctxt)
4244{
4245 /* Update for "free" here, after sm-handling.
4246
4247 If the ptr points to an underlying heap region, delete the region,
4248 poisoning pointers to it and regions within it.
4249
4250 We delay this until after sm-state has been updated so that the
4251 sm-handling can transition all of the various casts of the pointer
4252 to a "freed" state *before* we delete the related region here.
4253
4254 This has to be done here so that the sm-handling can use the fact
4255 that they point to the same region to establish that they are equal
4256 (in region_model::eval_condition_without_cm), and thus transition
4257 all pointers to the region to the "freed" state together, regardless
4258 of casts. */
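  /* Hypothetical example of why this ordering matters:
       void *p = malloc (n);
       char *q = (char *)p;
       free (q);
     here "p" and "q" are distinct svalues pointing to the same heap
     region, so the sm-handling can mark both as "freed" before the
     region itself is deleted below.  */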
4259 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4260 if (is_named_call_p (callee_fndecl, "free", call, 1))
4261 {
4262 tree ptr = gimple_call_arg (call, 0);
4263 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
4264 svalue *ptr_sval = get_svalue (ptr_sid);
4265 if (region_svalue *ptr_to_region_sval
4266 = ptr_sval->dyn_cast_region_svalue ())
4267 {
4268 /* If the ptr points to an underlying heap region, delete it,
4269 poisoning pointers. */
4270 region_id pointee_rid = ptr_to_region_sval->get_pointee ();
4271 region_id heap_rid = get_root_region ()->ensure_heap_region (this);
4272 if (!pointee_rid.null_p ()
4273 && get_region (pointee_rid)->get_parent () == heap_rid)
4274 {
4275 purge_stats stats;
4276 delete_region_and_descendents (pointee_rid,
4277 POISON_KIND_FREED,
4278 &stats, ctxt->get_logger ());
4279 purge_unused_svalues (&stats, ctxt);
4280 validate ();
4281 // TODO: do anything with stats?
4282 }
4283 }
4284 return;
4285 }
4286
4287 if (unknown_side_effects)
4288 handle_unrecognized_call (call, ctxt);
4289}
4290
4291/* Helper class for region_model::handle_unrecognized_call, for keeping
4292 track of all regions that are reachable, and, of those, which are
4293 mutable. */
4294
4295class reachable_regions
4296{
4297public:
4298 reachable_regions (region_model *model)
4299 : m_model (model), m_reachable_rids (), m_mutable_rids ()
4300 {}
4301
4302 /* Lazily mark RID as being reachable, recursively adding regions
4303 reachable from RID. */
4304 void add (region_id rid, bool is_mutable)
4305 {
4306 gcc_assert (!rid.null_p ());
4307
4308 unsigned idx = rid.as_int ();
4309 /* Bail out if this region is already in the sets at the IS_MUTABLE
4310 level of mutability. */
4311 if (!is_mutable && bitmap_bit_p (m_reachable_rids, idx))
4312 return;
4313 bitmap_set_bit (m_reachable_rids, idx);
4314
4315 if (is_mutable)
4316 {
4317 if (bitmap_bit_p (m_mutable_rids, idx))
4318 return;
4319 else
4320 bitmap_set_bit (m_mutable_rids, idx);
4321 }
4322
4323 /* If this region's value is a pointer, add the pointee. */
4324 region *reg = m_model->get_region (rid);
4325 svalue_id sid = reg->get_value_direct ();
4326 svalue *sval = m_model->get_svalue (sid);
4327 if (sval)
4328 if (region_svalue *ptr = sval->dyn_cast_region_svalue ())
4329 {
4330 region_id pointee_rid = ptr->get_pointee ();
4331 /* Use const-ness of pointer type to affect mutability. */
4332 bool ptr_is_mutable = true;
4333 if (ptr->get_type ()
4334 && TREE_CODE (ptr->get_type ()) == POINTER_TYPE
4335 && TYPE_READONLY (TREE_TYPE (ptr->get_type ())))
4336 ptr_is_mutable = false;
4337 add (pointee_rid, ptr_is_mutable);
4338 }
4339
4340 /* Add descendents of this region. */
4341 region_id_set descendents (m_model);
4342 m_model->get_descendents (rid, &descendents, region_id::null ());
4343 for (unsigned i = 0; i < m_model->get_num_regions (); i++)
4344 {
4345 region_id iter_rid = region_id::from_int (i);
4346 if (descendents.region_p (iter_rid))
4347 add (iter_rid, is_mutable);
4348 }
4349 }
4350
4351 bool mutable_p (region_id rid)
4352 {
4353 gcc_assert (!rid.null_p ());
4354 return bitmap_bit_p (m_mutable_rids, rid.as_int ());
4355 }
4356
4357private:
4358 region_model *m_model;
4359
4360 /* The region ids already seen. This has to be an auto_bitmap rather than
4361 an auto_sbitmap as new regions can be created within the model during
4362 the traversal. */
4363 auto_bitmap m_reachable_rids;
4364
4365 /* The region_ids that can be changed (accessed via non-const pointers). */
4366 auto_bitmap m_mutable_rids;
4367};
4368
4369/* Handle a call CALL to a function with unknown behavior.
4370
4371 Traverse the regions in this model, determining what regions are
4372 reachable from pointer arguments to CALL and from global variables,
4373 recursively.
4374
4375 Set all reachable regions to new unknown values and purge sm-state
4376 from their values, and from values that point to them. */
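/* A hypothetical example: for a call such as
     extern void external_fn (int *out, const char *msg);
     ...
     int local = 42;
     external_fn (&local, "msg");
   the region for "local" is reachable via a non-const pointer and so is
   set to a new unknown value, whereas the pointee of the "const char *"
   argument is reachable but not mutable, and so is left untouched.  */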
4377
4378void
4379region_model::handle_unrecognized_call (const gcall *call,
4380 region_model_context *ctxt)
4381{
4382 tree fndecl = get_fndecl_for_call (call, ctxt);
4383
4384 reachable_regions reachable_regions (this);
4385
4386 /* Determine the reachable regions and their mutability. */
4387 {
4388 /* Globals. */
4389 region_id globals_rid = get_globals_region_id ();
4390 if (!globals_rid.null_p ())
4391 reachable_regions.add (globals_rid, true);
4392
4393 /* Params that are pointers. */
4394 tree iter_param_types = NULL_TREE;
4395 if (fndecl)
4396 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4397 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
4398 {
4399 /* Track expected param type, where available. */
4400 tree param_type = NULL_TREE;
4401 if (iter_param_types)
4402 {
4403 param_type = TREE_VALUE (iter_param_types);
4404 gcc_assert (param_type);
4405 iter_param_types = TREE_CHAIN (iter_param_types);
4406 }
4407
4408 tree parm = gimple_call_arg (call, arg_idx);
4409 svalue_id parm_sid = get_rvalue (parm, NULL);
4410 svalue *parm_sval = get_svalue (parm_sid);
4411 if (parm_sval)
4412 if (region_svalue *parm_ptr = parm_sval->dyn_cast_region_svalue ())
4413 {
4414 region_id pointee_rid = parm_ptr->get_pointee ();
4415 bool is_mutable = true;
4416 if (param_type
4417 && TREE_CODE (param_type) == POINTER_TYPE
4418 && TYPE_READONLY (TREE_TYPE (param_type)))
4419 is_mutable = false;
4420 reachable_regions.add (pointee_rid, is_mutable);
4421 }
4422 // FIXME: what about compound parms that contain ptrs?
4423 }
4424 }
4425
4426 /* OK: we now have all reachable regions.
4427 Set them all to new unknown values. */
4428 for (unsigned i = 0; i < get_num_regions (); i++)
4429 {
4430 region_id iter_rid = region_id::from_int (i);
4431 if (reachable_regions.mutable_p (iter_rid))
4432 {
4433 region *reg = get_region (iter_rid);
4434
4435 /* Purge any sm-state for any underlying svalue. */
4436 svalue_id curr_sid = reg->get_value_direct ();
4437 if (!curr_sid.null_p ())
4438 ctxt->on_unknown_change (curr_sid);
4439
4440 set_to_new_unknown_value (iter_rid,
4441 reg->get_type (),
4442 ctxt);
4443 }
4444 }
4445
4446 /* Purge sm-state for any remaining svalues that point to regions that
4447 were reachable. This helps suppress leak false-positives.
4448
4449 For example, if we had a malloc call that was cast to a "foo *" type,
4450 we could have a temporary void * for the result of malloc which has its
4451 own svalue, not reachable from the function call, but for which the
4452 "foo *" svalue was reachable. If we don't purge it, the temporary will
4453 be reported as a leak. */
4454 int i;
4455 svalue *svalue;
4456 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
4457 if (region_svalue *ptr = svalue->dyn_cast_region_svalue ())
4458 {
4459 region_id pointee_rid = ptr->get_pointee ();
4460 if (reachable_regions.mutable_p (pointee_rid))
4461 ctxt->on_unknown_change (svalue_id::from_int (i));
4462 }
4463
4464 validate ();
4465}
4466
4467/* Update this model for the RETURN_STMT, using CTXT to report any
4468 diagnostics. */
4469
4470void
4471region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
4472{
4473 tree callee = get_current_function ()->decl;
4474 tree lhs = DECL_RESULT (callee);
4475 tree rhs = gimple_return_retval (return_stmt);
4476
4477 if (lhs && rhs)
4478 set_value (get_lvalue (lhs, ctxt), get_rvalue (rhs, ctxt), ctxt);
4479}
4480
4481/* Update this model for a call and return of "setjmp" at CALL within ENODE,
4482 using CTXT to report any diagnostics.
4483
4484 This is for the initial direct invocation of setjmp (which returns 0),
4485 as opposed to any second return due to longjmp. */
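/* For example (hypothetical code):
     jmp_buf env;
     if (setjmp (env) == 0)
       ...
   the direct call stores a setjmp_svalue for ENODE into the region for
   "env", and the LHS of the call is set to the constant 0.  */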
4486
4487void
4488region_model::on_setjmp (const gcall *call, const exploded_node *enode,
4489 region_model_context *ctxt)
4490{
4491 region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt);
4492 region *buf = get_region (buf_rid);
4493
4494 /* Create a setjmp_svalue for ENODE and store it in BUF_RID's region. */
4495 if (buf)
4496 {
4497 svalue *sval = new setjmp_svalue (enode, buf->get_type ());
4498 svalue_id new_sid = add_svalue (sval);
4499 set_value (buf_rid, new_sid, ctxt);
4500 }
4501
4502 /* Direct calls to setjmp return 0. */
4503 if (tree lhs = gimple_call_lhs (call))
4504 {
4505 tree zero = build_int_cst (TREE_TYPE (lhs), 0);
4506 svalue_id new_sid = get_or_create_constant_svalue (zero);
4507 region_id lhs_rid = get_lvalue (lhs, ctxt);
4508 set_value (lhs_rid, new_sid, ctxt);
4509 }
4510}
4511
4512/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
4513 to a "setjmp" at SETJMP_CALL where the final stack depth should be
4514 SETJMP_STACK_DEPTH. Purge any stack frames, potentially reporting on
4515 leaks to CTXT. */
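/* For example, rewinding from a hypothetical "longjmp (env, 0);" back to
   its "setjmp (env)" pops any intervening frames and assigns 1 (not 0)
   to the LHS of the setjmp call, since passing 0 to longjmp makes setjmp
   return 1.  */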
4516
4517void
4518region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
4519 int setjmp_stack_depth,
4520 region_model_context *ctxt)
4521{
4522 /* Evaluate the val, using the frame of the "longjmp". */
4523 tree fake_retval = gimple_call_arg (longjmp_call, 1);
4524 svalue_id fake_retval_sid = get_rvalue (fake_retval, ctxt);
4525
4526 /* Pop any frames until we reach the stack depth of the function where
4527 setjmp was called. */
4528 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
4529 while (get_stack_depth () > setjmp_stack_depth)
4530 {
4531 /* Don't purge unused svalues yet, as we're using fake_retval_sid. */
4532 pop_frame (false, NULL, ctxt);
4533 }
4534
4535 gcc_assert (get_stack_depth () == setjmp_stack_depth);
4536
4537 /* Assign to LHS of "setjmp" in new_state. */
4538 if (tree lhs = gimple_call_lhs (setjmp_call))
4539 {
4540 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
4541 tree t_zero = build_int_cst (TREE_TYPE (fake_retval), 0);
4542 svalue_id zero_sid = get_or_create_constant_svalue (t_zero);
4543 tristate eq_zero = eval_condition (fake_retval_sid, EQ_EXPR, zero_sid);
4544 /* If we have 0, use 1. */
4545 if (eq_zero.is_true ())
4546 {
4547 tree t_one = build_int_cst (TREE_TYPE (fake_retval), 1);
4548 svalue_id one_sid = get_or_create_constant_svalue (t_one);
4549 fake_retval_sid = one_sid;
4550 }
4551 else
4552 {
4553 /* Otherwise note that the value is nonzero. */
4554 m_constraints->add_constraint (fake_retval_sid, NE_EXPR, zero_sid);
4555 }
4556
4557 region_id lhs_rid = get_lvalue (lhs, ctxt);
4558 set_value (lhs_rid, fake_retval_sid, ctxt);
4559 }
4560
4561 /* Now that we've assigned the fake_retval, we can purge the unused
4562 svalues, which could detect leaks. */
4563 purge_unused_svalues (NULL, ctxt, NULL);
4564 validate ();
4565}
4566
4567/* Update this region_model for a phi stmt of the form
4568 LHS = PHI <...RHS...>.
4569 where RHS is for the appropriate edge. */
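/* For example, the header block of a hypothetical
     for (i = 0; i < n; i++)
   loop might have a phi such as
     i_1 = PHI <0(2), i_7(4)>
   where the edge from the loop latch is a back edge; in that case the
   value of "i_1" is widened to "unknown" below, rather than effectively
   unrolling the loop.  */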
4570
4571void
4572region_model::handle_phi (tree lhs, tree rhs, bool is_back_edge,
4573 region_model_context *ctxt)
4574{
4575 /* For now, don't bother tracking the .MEM SSA names. */
4576 if (tree var = SSA_NAME_VAR (lhs))
4577 if (TREE_CODE (var) == VAR_DECL)
4578 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
4579 return;
4580
4581 svalue_id rhs_sid = get_rvalue (rhs, ctxt);
4582
4583 if (is_back_edge && get_svalue (rhs_sid)->get_kind () != SK_UNKNOWN)
4584 {
4585 /* If we have a back edge, we probably have a loop.
4586 Use an unknown value, to avoid effectively unrolling the
4587 loop.
4588 To terminate, we need to avoid generating a series of
4589 models with an unbounded monotonically increasing number of
4590 redundant unknown values; hence we need to purge svalues
4591 before inserting the state into the exploded graph, to
4592 collect unused svalues. */
4593 set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt);
4594 }
4595 else
4596 set_value (get_lvalue (lhs, ctxt), rhs_sid, ctxt);
4597}
4598
4599/* Implementation of region_model::get_lvalue; the latter adds type-checking.
4600
4601 Get the id of the region for PV within this region_model,
4602 emitting any diagnostics to CTXT. */
4603
4604region_id
4605region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt)
4606{
4607 tree expr = pv.m_tree;
4608
4609 gcc_assert (expr);
4610
4611 switch (TREE_CODE (expr))
4612 {
4613 default:
4614 gcc_unreachable ();
4615
4616 case ARRAY_REF:
4617 {
4618 tree array = TREE_OPERAND (expr, 0);
4619 tree index = TREE_OPERAND (expr, 1);
4620#if 0
4621 // TODO: operands 2 and 3, if present:
4622 gcc_assert (TREE_OPERAND (expr, 2) == NULL_TREE);
4623 gcc_assert (TREE_OPERAND (expr, 3) == NULL_TREE);
4624#endif
4625
4626 region_id array_rid = get_lvalue (array, ctxt);
4627 svalue_id index_sid = get_rvalue (index, ctxt);
4628 array_region *array_reg = get_region<array_region> (array_rid);
4629 return array_reg->get_element (this, array_rid, index_sid, ctxt);
4630 }
4631 break;
4632
4633 case MEM_REF:
4634 {
4635 tree ptr = TREE_OPERAND (expr, 0);
4636 tree offset = TREE_OPERAND (expr, 1);
4637 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
4638 svalue_id offset_sid = get_rvalue (offset, ctxt);
4639 return get_or_create_mem_ref (TREE_TYPE (expr), ptr_sid,
4640 offset_sid, ctxt);
4641 }
4642 break;
4643
4644 case VAR_DECL:
4645 /* Handle globals. */
4646 if (is_global_var (expr))
4647 {
4648 region_id globals_rid
4649 = get_root_region ()->ensure_globals_region (this);
4650 map_region *globals = get_region<map_region> (globals_rid);
4651 region_id var_rid = globals->get_or_create (this, globals_rid, expr,
4652 TREE_TYPE (expr));
4653 return var_rid;
4654 }
4655
4656 /* Fall through. */
4657
4658 case SSA_NAME:
4659 case PARM_DECL:
4660 case RESULT_DECL:
4661 {
4662 gcc_assert (TREE_CODE (expr) == SSA_NAME
4663 || TREE_CODE (expr) == PARM_DECL
4664 || TREE_CODE (expr) == VAR_DECL
4665 || TREE_CODE (expr) == RESULT_DECL);
4666
4667 int stack_depth = pv.m_stack_depth;
4668 stack_region *stack = get_root_region ()->get_stack_region (this);
4669 gcc_assert (stack);
4670 region_id frame_rid = stack->get_frame_rid (stack_depth);
4671 frame_region *frame = get_region <frame_region> (frame_rid);
4672 gcc_assert (frame);
4673 region_id child_rid = frame->get_or_create (this, frame_rid, expr,
4674 TREE_TYPE (expr));
4675 return child_rid;
4676 }
4677
4678 case COMPONENT_REF:
4679 {
4680 /* obj.field */
4681 tree obj = TREE_OPERAND (expr, 0);
4682 tree field = TREE_OPERAND (expr, 1);
4683 region_id obj_rid = get_lvalue (obj, ctxt);
4684 region_id struct_or_union_rid
4685 = get_or_create_view (obj_rid, TREE_TYPE (obj));
4686 return get_field_region (struct_or_union_rid, field);
4687 }
4688 break;
4689
4690 case STRING_CST:
4691 {
4692 tree cst_type = TREE_TYPE (expr);
4693 array_region *cst_region = new array_region (m_root_rid, cst_type);
4694 region_id cst_rid = add_region (cst_region);
4695 svalue_id cst_sid = get_or_create_constant_svalue (expr);
4696 cst_region->set_value (*this, cst_rid, cst_sid, ctxt);
4697 return cst_rid;
4698 }
4699 break;
4700 }
4701}
4702
4703/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
4704
4705#define ASSERT_COMPAT_TYPES(SRC_TYPE, DST_TYPE) \
4706 gcc_checking_assert (useless_type_conversion_p ((SRC_TYPE), (DST_TYPE)))
4707
4708/* Get the id of the region for PV within this region_model,
4709 emitting any diagnostics to CTXT. */
4710
4711region_id
4712region_model::get_lvalue (path_var pv, region_model_context *ctxt)
4713{
4714 if (pv.m_tree == NULL_TREE)
4715 return region_id::null ();
4716
4717 region_id result_rid = get_lvalue_1 (pv, ctxt);
4718 ASSERT_COMPAT_TYPES (get_region (result_rid)->get_type (),
4719 TREE_TYPE (pv.m_tree));
4720 return result_rid;
4721}
4722
4723/* Get the region_id for EXPR within this region_model (assuming the most
4724 recent stack frame if it's a local). */
4725
4726region_id
4727region_model::get_lvalue (tree expr, region_model_context *ctxt)
4728{
4729 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
4730}
4731
4732/* Implementation of region_model::get_rvalue; the latter adds type-checking.
4733
4734 Get the value of PV within this region_model,
4735 emitting any diagnostics to CTXT. */
4736
4737svalue_id
4738region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt)
4739{
4740 gcc_assert (pv.m_tree);
4741
4742 switch (TREE_CODE (pv.m_tree))
4743 {
4744 default:
4745 {
4746 svalue *unknown_sval = new unknown_svalue (TREE_TYPE (pv.m_tree));
4747 return add_svalue (unknown_sval);
4748 }
4749 break;
4750
4751 case ADDR_EXPR:
4752 {
4753 /* "&EXPR". */
4754 tree expr = pv.m_tree;
4755 tree op0 = TREE_OPERAND (expr, 0);
4756 if (TREE_CODE (op0) == FUNCTION_DECL)
4757 return get_svalue_for_fndecl (TREE_TYPE (expr), op0);
4758 else if (TREE_CODE (op0) == LABEL_DECL)
4759 return get_svalue_for_label (TREE_TYPE (expr), op0);
4760 region_id expr_rid = get_lvalue (op0, ctxt);
4761 return get_or_create_ptr_svalue (TREE_TYPE (expr), expr_rid);
4762 }
4763 break;
4764
4765 case ARRAY_REF:
4766 {
4767 region_id element_rid = get_lvalue (pv, ctxt);
4768 return get_region (element_rid)->get_value (*this, true, ctxt);
4769 }
4770
4771 case INTEGER_CST:
4772 case REAL_CST:
4773 case STRING_CST:
4774 return get_or_create_constant_svalue (pv.m_tree);
4775
4776 case COMPONENT_REF:
4777 case MEM_REF:
4778 case SSA_NAME:
4779 case VAR_DECL:
4780 case PARM_DECL:
4781 case RESULT_DECL:
4782 {
4783 region_id var_rid = get_lvalue (pv, ctxt);
4784 return get_region (var_rid)->get_value (*this, true, ctxt);
4785 }
4786 }
4787}
4788
4789/* Get the value of PV within this region_model,
4790 emitting any diagnostics to CTXT. */
4791
4792svalue_id
4793region_model::get_rvalue (path_var pv, region_model_context *ctxt)
4794{
4795 if (pv.m_tree == NULL_TREE)
4796 return svalue_id::null ();
4797 svalue_id result_sid = get_rvalue_1 (pv, ctxt);
4798
4799 ASSERT_COMPAT_TYPES (get_svalue (result_sid)->get_type (),
4800 TREE_TYPE (pv.m_tree));
4801
4802 return result_sid;
4803}
4804
4805/* Get the value of EXPR within this region_model (assuming the most
4806 recent stack frame if it's a local). */
4807
4808svalue_id
4809region_model::get_rvalue (tree expr, region_model_context *ctxt)
4810{
4811 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
4812}
4813
4814/* Return an svalue_id for a pointer to RID of type PTR_TYPE, reusing
4815 existing pointer values if one is available. */
4816
4817svalue_id
4818region_model::get_or_create_ptr_svalue (tree ptr_type, region_id rid)
4819{
4820 /* Reuse existing region_svalue, if one of the right type is
4821 available. */
4822 /* In theory we could stash a svalue_id in "region", but differing
4823 pointer types muddles things.
4824 For now, just do a linear search through all existing svalues. */
4825 int i;
4826 svalue *svalue;
4827 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
4828 if (region_svalue *ptr_svalue = svalue->dyn_cast_region_svalue ())
4829 if (ptr_svalue->get_pointee () == rid
4830 && ptr_svalue->get_type () == ptr_type)
4831 return svalue_id::from_int (i);
4832
4833 return add_svalue (new region_svalue (ptr_type, rid));
4834}
4835
4836/* Return an svalue_id for a constant_svalue for CST_EXPR,
4837 creating the constant_svalue if necessary.
4838 The constant_svalue instances are reused, based on pointer equality
4839 of trees. */
4840
4841svalue_id
4842region_model::get_or_create_constant_svalue (tree cst_expr)
4843{
4844 gcc_assert (cst_expr);
4845
4846 /* Reuse one if it already exists. */
4847 // TODO: maybe store a map, rather than do linear search?
4848 int i;
4849 svalue *svalue;
4850 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
4851 if (svalue->maybe_get_constant () == cst_expr)
4852 return svalue_id::from_int (i);
4853
4854 svalue_id cst_sid = add_svalue (new constant_svalue (cst_expr));
4855 return cst_sid;
4856}
4857
4858/* Return an svalue_id for a region_svalue for FNDECL,
4859 creating the function_region if necessary. */
4860
4861svalue_id
4862region_model::get_svalue_for_fndecl (tree ptr_type, tree fndecl)
4863{
4864 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
4865 region_id function_rid = get_region_for_fndecl (fndecl);
4866 return get_or_create_ptr_svalue (ptr_type, function_rid);
4867}
4868
4869/* Return a region_id for a function_region for FNDECL,
4870 creating it if necessary. */
4871
4872region_id
4873region_model::get_region_for_fndecl (tree fndecl)
4874{
4875 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
4876
4877 region_id code_rid = get_root_region ()->ensure_code_region (this);
4878 code_region *code = get_root_region ()->get_code_region (this);
4879
4880 return code->get_or_create (this, code_rid, fndecl, TREE_TYPE (fndecl));
4881}
4882
4883/* Return an svalue_id for a region_svalue for LABEL,
4884 creating the label_region if necessary. */
4885
4886svalue_id
4887region_model::get_svalue_for_label (tree ptr_type, tree label)
4888{
4889 gcc_assert (TREE_CODE (label) == LABEL_DECL);
4890 region_id label_rid = get_region_for_label (label);
4891 return get_or_create_ptr_svalue (ptr_type, label_rid);
4892}
4893
4894/* Return a region_id for a label_region for LABEL,
4895 creating it if necessary. */
4896
4897region_id
4898region_model::get_region_for_label (tree label)
4899{
4900 gcc_assert (TREE_CODE (label) == LABEL_DECL);
4901
4902 tree fndecl = DECL_CONTEXT (label);
4903 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
4904
4905 region_id func_rid = get_region_for_fndecl (fndecl);
4906 function_region *func_reg = get_region <function_region> (func_rid);
4907 return func_reg->get_or_create (this, func_rid, label, TREE_TYPE (label));
4908}
4909
4910/* Build a cast of SRC_EXPR to DST_TYPE, or return NULL_TREE.
4911
4912 Adapted from gcc::jit::playback::context::build_cast, which in turn is
4913 adapted from
4914 - c/c-typeck.c:build_c_cast
4915 - c/c-convert.c: convert
4916 - convert.h
4917 Only some kinds of cast are currently supported here. */
4918
4919static tree
4920build_cast (tree dst_type, tree src_expr)
4921{
4922 tree result = targetm.convert_to_type (dst_type, src_expr);
4923 if (result)
4924 return result;
4925 enum tree_code dst_code = TREE_CODE (dst_type);
4926 switch (dst_code)
4927 {
4928 case INTEGER_TYPE:
4929 case ENUMERAL_TYPE:
4930 result = convert_to_integer (dst_type, src_expr);
4931 goto maybe_fold;
4932
4933 case BOOLEAN_TYPE:
4934 /* Compare with c_objc_common_truthvalue_conversion and
4935 c_common_truthvalue_conversion. */
4936 /* For now, convert to: (src_expr != 0) */
4937 result = build2 (NE_EXPR, dst_type,
4938 src_expr,
4939 build_int_cst (TREE_TYPE (src_expr), 0));
4940 goto maybe_fold;
4941
4942 case REAL_TYPE:
4943 result = convert_to_real (dst_type, src_expr);
4944 goto maybe_fold;
4945
4946 case POINTER_TYPE:
4947 result = build1 (NOP_EXPR, dst_type, src_expr);
4948 goto maybe_fold;
4949
4950 default:
4951 return NULL_TREE;
4952
4953 maybe_fold:
4954 if (TREE_CODE (result) != C_MAYBE_CONST_EXPR)
4955 result = fold (result);
4956 return result;
4957 }
4958}
4959
4960/* If the type of SID's underlying value is DST_TYPE, return SID.
4961 Otherwise, attempt to create (or reuse) an svalue representing an access
4962 of SID as a DST_TYPE and return that value's svalue_id. */
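/* For example, an unknown "void *" value accessed as a hypothetical
   "struct foo *" yields a new unknown "struct foo *" svalue that is
   constrained to be equal to the original, so that later comparisons
   treat the two pointers as the same value.  */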
4963
4964svalue_id
4965region_model::maybe_cast_1 (tree dst_type, svalue_id sid)
4966{
4967 svalue *sval = get_svalue (sid);
4968 tree src_type = sval->get_type ();
4969 if (src_type == dst_type)
4970 return sid;
4971
4972 if (POINTER_TYPE_P (dst_type)
4973 && POINTER_TYPE_P (src_type))
4974 {
4975 /* Pointer to region. */
4976 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4977 return get_or_create_ptr_svalue (dst_type, ptr_sval->get_pointee ());
4978
4979 /* Unknown pointer? Get or create a new unknown pointer of the
4980 correct type, preserving the equality between the pointers. */
4981 if (sval->dyn_cast_unknown_svalue ())
4982 {
4983 equiv_class &ec = m_constraints->get_equiv_class (sid);
4984
4985 /* Look for an existing pointer of the correct type within the EC. */
4986 int i;
4987 svalue_id *equiv_sid;
4988 FOR_EACH_VEC_ELT (ec.m_vars, i, equiv_sid)
4989 {
4990 svalue *equiv_val = get_svalue (*equiv_sid);
4991 if (equiv_val->get_type () == dst_type)
4992 return *equiv_sid;
4993 }
4994
4995 /* Otherwise, create a new unknown pointer of the correct type. */
4996 svalue *unknown_sval = new unknown_svalue (dst_type);
4997 svalue_id new_ptr_sid = add_svalue (unknown_sval);
4998 m_constraints->add_constraint (sid, EQ_EXPR, new_ptr_sid);
4999 return new_ptr_sid;
5000 }
5001 }
5002
5003 /* Attempt to cast constants. */
5004 if (tree src_cst = sval->maybe_get_constant ())
5005 {
5006 tree dst = build_cast (dst_type, src_cst);
5007 gcc_assert (dst != NULL_TREE);
5008 if (CONSTANT_CLASS_P (dst))
5009 return get_or_create_constant_svalue (dst);
5010 }
5011
5012 /* Otherwise, return a new unknown value. */
5013 svalue *unknown_sval = new unknown_svalue (dst_type);
5014 return add_svalue (unknown_sval);
5015}
5016
5017/* If the type of SID's underlying value is DST_TYPE, return SID.
5018 Otherwise, attempt to create (or reuse) an svalue representing an access
5019 of SID as a DST_TYPE and return that value's svalue_id.
5020
5021 If the result != SID, then call CTXT's on_cast vfunc (if CTXT is non-NULL),
5022 so that sm-state can be propagated from SID to the result. */
5023
5024svalue_id
5025region_model::maybe_cast (tree dst_type, svalue_id sid,
5026 region_model_context *ctxt)
5027{
5028 svalue_id result = maybe_cast_1 (dst_type, sid);
5029 if (result != sid)
5030 if (ctxt)
5031 {
5032 /* Notify ctxt about a cast, so any sm-state can be copied. */
5033 ctxt->on_cast (sid, result);
5034 }
5035 return result;
5036}
5037
5038/* Ensure that the region for OBJ_RID has a child region for FIELD;
5039 return the child region's region_id. */
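/* For example, given the hypothetical types
     struct s { int i; };
     union u { int i; float f; };
   accessing "s.i" yields a child region of the struct's region keyed by
   the FIELD_DECL for "i", whereas accessing "u.f" yields a float-typed
   view of the whole union's region.  */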
5040
5041region_id
5042region_model::get_field_region (region_id struct_or_union_rid, tree field)
5043{
5044 struct_or_union_region *sou_reg
5045 = get_region<struct_or_union_region> (struct_or_union_rid);
5046
5047 /* Inherit constness from parent type. */
5048 const int qual_mask = TYPE_QUAL_CONST;
5049 int sou_quals = TYPE_QUALS (sou_reg->get_type ()) & qual_mask;
5050 tree field_type = TREE_TYPE (field);
5051 tree field_type_with_quals = build_qualified_type (field_type, sou_quals);
5052
5053 // TODO: maybe convert to a vfunc?
5054 if (sou_reg->get_kind () == RK_UNION)
5055 {
5056 /* Union.
5057 Get a view of the union as a whole, with the type of the field. */
5058 region_id view_rid
5059 = get_or_create_view (struct_or_union_rid, field_type_with_quals);
5060 return view_rid;
5061 }
5062 else
5063 {
5064 /* Struct. */
5065 region_id child_rid
5066 = sou_reg->get_or_create (this, struct_or_union_rid, field,
5067 field_type_with_quals);
5068 return child_rid;
5069 }
5070}
5071
5072/* Get a region_id for referencing PTR_SID, creating a region if need be, and
5073 potentially generating warnings via CTXT. */
5074
5075region_id
5076region_model::deref_rvalue (svalue_id ptr_sid, region_model_context *ctxt)
5077{
5078 gcc_assert (!ptr_sid.null_p ());
5079 svalue *ptr_svalue = get_svalue (ptr_sid);
5080 gcc_assert (ptr_svalue);
5081
5082 switch (ptr_svalue->get_kind ())
5083 {
5084 case SK_REGION:
5085 {
5086 region_svalue *region_sval = as_a <region_svalue *> (ptr_svalue);
5087 return region_sval->get_pointee ();
5088 }
5089
5090 case SK_CONSTANT:
5091 goto create_symbolic_region;
5092
5093 case SK_POISONED:
5094 {
5095 if (ctxt)
5096 if (tree ptr = get_representative_tree (ptr_sid))
5097 {
5098 poisoned_svalue *poisoned_sval
5099 = as_a <poisoned_svalue *> (ptr_svalue);
5100 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
5101 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
5102 }
5103 goto create_symbolic_region;
5104 }
5105
5106 case SK_UNKNOWN:
5107 {
5108 create_symbolic_region:
5109 /* We need a symbolic_region to represent this unknown region.
5110 We don't know if it is on the heap, stack, or a global,
5111 so use the root region as parent. */
5112 region_id new_rid
5113 = add_region (new symbolic_region (m_root_rid, false));
5114
5115 /* We need to write the region back into the pointer,
5116 or we'll get a new, different region each time.
5117 We do this by changing the meaning of ptr_sid, replacing
5118 the unknown value with the ptr to the new region.
5119 We replace the meaning of the ID rather than simply writing
5120 to PTR's lvalue since there could be several places sharing
5121 the same unknown ptr value. */
5122 svalue *ptr_val
5123 = new region_svalue (ptr_svalue->get_type (), new_rid);
5124 replace_svalue (ptr_sid, ptr_val);
5125
5126 return new_rid;
5127 }
5128
5129 case SK_SETJMP:
5130 goto create_symbolic_region;
5131 }
5132
5133 gcc_unreachable ();
5134}
5135
5136/* Get a region_id for referencing PTR, creating a region if need be, and
5137 potentially generating warnings via CTXT. */
5138
5139region_id
5140region_model::deref_rvalue (tree ptr, region_model_context *ctxt)
5141{
5142 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
5143 return deref_rvalue (ptr_sid, ctxt);
5144}
5145
5146/* Set the value of the region given by LHS_RID to the value given
5147 by RHS_SID. */
5148
5149void
5150region_model::set_value (region_id lhs_rid, svalue_id rhs_sid,
5151 region_model_context *ctxt)
5152{
5153 gcc_assert (!lhs_rid.null_p ());
5154 gcc_assert (!rhs_sid.null_p ());
5155 get_region (lhs_rid)->set_value (*this, lhs_rid, rhs_sid, ctxt);
5156}
5157
5158/* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5159 this model. */
5160
5161tristate
5162region_model::eval_condition (svalue_id lhs_sid,
5163 enum tree_code op,
5164 svalue_id rhs_sid) const
5165{
5166 tristate ts = eval_condition_without_cm (lhs_sid, op, rhs_sid);
5167
5168 if (ts.is_known ())
5169 return ts;
5170
5171 /* Otherwise, try constraints. */
5172 return m_constraints->eval_condition (lhs_sid, op, rhs_sid);
5173}
5174
5175/* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5176 this model, without resorting to the constraint_manager.
5177
5178 This is exposed so that impl_region_model_context::on_state_leak can
5179 check for equality part-way through region_model::purge_unused_svalues
5180 without risking creating new ECs. */
5181
5182tristate
5183region_model::eval_condition_without_cm (svalue_id lhs_sid,
5184 enum tree_code op,
5185 svalue_id rhs_sid) const
5186{
5187 svalue *lhs = get_svalue (lhs_sid);
5188 svalue *rhs = get_svalue (rhs_sid);
5189 gcc_assert (lhs);
5190 gcc_assert (rhs);
5191
5192 /* See what we know based on the values. */
5193 if (lhs && rhs)
5194 {
5195 if (lhs == rhs)
5196 {
5197 /* If we have the same svalue, then we have equality
5198 (apart from NaN-handling).
5199 TODO: should this definitely be the case for poisoned values? */
5200 switch (op)
5201 {
5202 case EQ_EXPR:
5203 case GE_EXPR:
5204 case LE_EXPR:
5205 return tristate::TS_TRUE;
5206
5207 case NE_EXPR:
5208 case GT_EXPR:
5209 case LT_EXPR:
5210 return tristate::TS_FALSE;
5211
5212 default:
5213 /* For other ops, use the logic below. */
5214 break;
5215 }
5216 }
5217
5218 /* If we have a pair of region_svalues, compare them. */
5219 if (region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
5220 if (region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
5221 {
5222 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
5223 if (res.is_known ())
5224 return res;
5225 /* Otherwise, only known through constraints. */
5226 }
5227
5228 /* If we have a pair of constants, compare them. */
5229 if (constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
5230 if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
5231 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
5232
5233 /* Handle comparison of a region_svalue against zero. */
5234 if (region_svalue *ptr = lhs->dyn_cast_region_svalue ())
5235 if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
5236 if (zerop (cst_rhs->get_constant ()))
5237 {
5238 /* A region_svalue is a non-NULL pointer, except in certain
5239 special cases (see the comment for region::non_null_p). */
5240 region *pointee = get_region (ptr->get_pointee ());
5241 if (pointee->non_null_p (*this))
5242 {
5243 switch (op)
5244 {
5245 default:
5246 gcc_unreachable ();
5247
5248 case EQ_EXPR:
5249 case GE_EXPR:
5250 case LE_EXPR:
5251 return tristate::TS_FALSE;
5252
5253 case NE_EXPR:
5254 case GT_EXPR:
5255 case LT_EXPR:
5256 return tristate::TS_TRUE;
5257 }
5258 }
5259 }
5260 }
5261
5262 return tristate::TS_UNKNOWN;
5263}
5264
5265/* Attempt to add the constraint "LHS OP RHS" to this region_model.
5266 If it is consistent with existing constraints, add it, and return true.
5267 Return false if it contradicts existing constraints.
5268 Use CTXT for reporting any diagnostics associated with the accesses. */
5269
5270bool
5271region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5272 region_model_context *ctxt)
5273{
5274 svalue_id lhs_sid = get_rvalue (lhs, ctxt);
5275 svalue_id rhs_sid = get_rvalue (rhs, ctxt);
5276
5277 tristate t_cond = eval_condition (lhs_sid, op, rhs_sid);
5278
5279 /* If we already have the condition, do nothing. */
5280 if (t_cond.is_true ())
5281 return true;
5282
5283 /* Reject a constraint that would contradict existing knowledge, as
5284 unsatisfiable. */
5285 if (t_cond.is_false ())
5286 return false;
5287
5288 /* Store the constraint. */
5289 m_constraints->add_constraint (lhs_sid, op, rhs_sid);
5290
5291 add_any_constraints_from_ssa_def_stmt (lhs, op, rhs, ctxt);
5292
5293 /* Notify the context, if any. This exists so that the state machines
5294 in a program_state can be notified about the condition, and so can
5295 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
5296 when synthesizing constraints as above. */
5297 if (ctxt)
5298 ctxt->on_condition (lhs, op, rhs);
5299
5300 return true;
5301}
5302
5303/* Subroutine of region_model::add_constraint for handling optimized
5304 && and || conditionals.
5305
5306 If we have an SSA_NAME for a boolean compared against 0,
5307 look at anything implied by the def stmt and call add_constraint
5308 for it (which could recurse).
5309
5310 For example, if we have
5311 _1 = p_6 == 0B;
5312 _2 = p_8 == 0B;
5313 _3 = _1 | _2;
5314 and add the constraint
5315 (_3 == 0),
5316 then the def stmt for _3 implies that _1 and _2 are both false,
5317 and hence we can add the constraints:
5318 p_6 != 0B
5319 p_8 != 0B. */
5320
5321void
5322region_model::add_any_constraints_from_ssa_def_stmt (tree lhs,
5323 enum tree_code op,
5324 tree rhs,
5325 region_model_context *ctxt)
5326{
5327 if (TREE_CODE (lhs) != SSA_NAME)
5328 return;
5329
5330 if (rhs != boolean_false_node)
5331 return;
5332
5333 if (op != NE_EXPR && op != EQ_EXPR)
5334 return;
5335
5336 /* We have either
5337 - "LHS != false" (i.e. LHS is true), or
5338 - "LHS == false" (i.e. LHS is false). */
5339 bool is_true = op == NE_EXPR;
5340
5341 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
5342 gassign *assign = dyn_cast<gassign *> (def_stmt);
5343 if (!assign)
5344 return;
5345
5346 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
5347
5348 switch (rhs_code)
5349 {
5350 default:
5351 break;
5352 case BIT_AND_EXPR:
5353 {
5354 if (is_true)
5355 {
5356 /* ...and "LHS == (rhs1 & rhs2)", i.e. "(rhs1 & rhs2)" is true,
5357 then both rhs1 and rhs2 must be true. */
5358 tree rhs1 = gimple_assign_rhs1 (assign);
5359 tree rhs2 = gimple_assign_rhs2 (assign);
5360 add_constraint (rhs1, NE_EXPR, boolean_false_node, ctxt);
5361 add_constraint (rhs2, NE_EXPR, boolean_false_node, ctxt);
5362 }
5363 }
5364 break;
5365
5366 case BIT_IOR_EXPR:
5367 {
5368 if (!is_true)
5369 {
5370 /* ...and "LHS == (rhs1 | rhs2)",
5371 i.e. "(rhs1 | rhs2)" is false,
5372 then both rhs1 and rhs2 must be false. */
5373 tree rhs1 = gimple_assign_rhs1 (assign);
5374 tree rhs2 = gimple_assign_rhs2 (assign);
5375 add_constraint (rhs1, EQ_EXPR, boolean_false_node, ctxt);
5376 add_constraint (rhs2, EQ_EXPR, boolean_false_node, ctxt);
5377 }
5378 }
5379 break;
5380
5381 case EQ_EXPR:
5382 case NE_EXPR:
5383 {
5384 /* ...and "LHS == (rhs1 OP rhs2)"
5385 then rhs1 OP rhs2 must have the same logical value as LHS. */
5386 tree rhs1 = gimple_assign_rhs1 (assign);
5387 tree rhs2 = gimple_assign_rhs2 (assign);
5388 if (!is_true)
5389 rhs_code
5390 = invert_tree_comparison (rhs_code, false /* honor_nans */);
5391 add_constraint (rhs1, rhs_code, rhs2, ctxt);
5392 }
5393 break;
5394 }
5395}
5396
5397/* Determine what is known about the condition "LHS OP RHS" within
5398 this model.
5399 Use CTXT for reporting any diagnostics associated with the accesses. */
5400
5401tristate
5402region_model::eval_condition (tree lhs,
5403 enum tree_code op,
5404 tree rhs,
5405 region_model_context *ctxt)
5406{
5407 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
5408}
5409
5410/* If SID is a constant value, return the underlying tree constant.
5411 Otherwise, return NULL_TREE. */
5412
5413tree
5414region_model::maybe_get_constant (svalue_id sid) const
5415{
5416 gcc_assert (!sid.null_p ());
5417 svalue *sval = get_svalue (sid);
5418 return sval->maybe_get_constant ();
5419}
5420
5421/* Create a new child region of the heap (creating the heap region if
5422 necessary).
5423 Return the region_id of the new child region. */
5424
5425region_id
5426region_model::add_new_malloc_region ()
5427{
5428 region_id heap_rid
5429 = get_root_region ()->ensure_heap_region (this);
5430 return add_region (new symbolic_region (heap_rid, true));
5431}
5432
5433/* Attempt to return a tree that represents SID, or return NULL_TREE.
5434 Find the first region that stores the value (e.g. a local) and
5435 generate a representative tree for it. */
5436
5437tree
5438region_model::get_representative_tree (svalue_id sid) const
5439{
5440 if (sid.null_p ())
5441 return NULL_TREE;
5442
5443 unsigned i;
5444 region *region;
5445 FOR_EACH_VEC_ELT (m_regions, i, region)
5446 if (sid == region->get_value_direct ())
5447 {
5448 path_var pv = get_representative_path_var (region_id::from_int (i));
5449 if (pv.m_tree)
5450 return pv.m_tree;
5451 }
5452
5453 return maybe_get_constant (sid);
5454}
5455
5456/* Attempt to return a path_var that represents the region, or return
5457 the NULL path_var.
5458 For example, a region for a field of a local would be a path_var
5459 wrapping a COMPONENT_REF. */
5460
5461path_var
5462region_model::get_representative_path_var (region_id rid) const
5463{
5464 region *reg = get_region (rid);
5465 region *parent_region = get_region (reg->get_parent ());
5466 region_id stack_rid = get_stack_region_id ();
5467 if (!stack_rid.null_p ())
5468 if (parent_region->get_parent () == stack_rid)
5469 {
5470 frame_region *parent_frame = (frame_region *)parent_region;
5471 tree t = parent_frame->get_tree_for_child_region (rid);
5472 return path_var (t, parent_frame->get_depth ());
5473 }
5474 if (reg->get_parent () == get_globals_region_id ())
5475 {
5476 map_region *globals = get_root_region ()->get_globals_region (this);
5477 if (globals)
5478 return path_var (globals->get_tree_for_child_region (rid), -1);
5479 }
5480
5481 /* Handle e.g. fields of a local by recursing. */
5482 region_id parent_rid = reg->get_parent ();
5483 region *parent_reg = get_region (parent_rid);
5484 if (parent_reg)
5485 {
5486 if (parent_reg->get_kind () == RK_STRUCT)
5487 {
5488 map_region *parent_map_region = (map_region *)parent_reg;
5489 /* This can fail if we have a view, rather than a field. */
5490 if (tree child_key
5491 = parent_map_region->get_tree_for_child_region (rid))
5492 {
5493 path_var parent_pv = get_representative_path_var (parent_rid);
5494 if (parent_pv.m_tree && TREE_CODE (child_key) == FIELD_DECL)
5495 return path_var (build3 (COMPONENT_REF,
5496 TREE_TYPE (child_key),
5497 parent_pv.m_tree, child_key,
5498 NULL_TREE),
5499 parent_pv.m_stack_depth);
5500 }
5501 }
5502 }
5503
5504 return path_var (NULL_TREE, 0);
5505}
5506
5507/* Locate all regions that directly have value SID and append representative
5508 path_var instances for them into *OUT. */
5509
5510void
5511region_model::get_path_vars_for_svalue (svalue_id sid, vec<path_var> *out) const
5512{
5513 unsigned i;
5514 region *region;
5515 FOR_EACH_VEC_ELT (m_regions, i, region)
5516 if (sid == region->get_value_direct ())
5517 {
5518 path_var pv = get_representative_path_var (region_id::from_int (i));
5519 if (pv.m_tree)
5520 out->safe_push (pv);
5521 }
5522}
5523
5524/* Set DST_RID value to be a new unknown value of type TYPE. */
5525
5526svalue_id
5527region_model::set_to_new_unknown_value (region_id dst_rid, tree type,
5528 region_model_context *ctxt)
5529{
5530 gcc_assert (!dst_rid.null_p ());
5531 svalue_id new_sid = add_svalue (new unknown_svalue (type));
5532 set_value (dst_rid, new_sid, ctxt);
5533
5534 // TODO: presumably purge all child regions too (but do this in set_value?)
5535
5536 return new_sid;
5537}
5538
5539/* Update this model for any phis in SNODE, assuming we came from
5540 LAST_CFG_SUPEREDGE. */
5541
5542void
5543region_model::update_for_phis (const supernode *snode,
5544 const cfg_superedge *last_cfg_superedge,
5545 region_model_context *ctxt)
5546{
5547 gcc_assert (last_cfg_superedge);
5548
5549 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
5550 !gsi_end_p (gpi); gsi_next (&gpi))
5551 {
5552 gphi *phi = gpi.phi ();
5553
5554 tree src = last_cfg_superedge->get_phi_arg (phi);
5555 tree lhs = gimple_phi_result (phi);
5556
5557 /* Update next_state based on phi. */
5558 bool is_back_edge = last_cfg_superedge->back_edge_p ();
5559 handle_phi (lhs, src, is_back_edge, ctxt);
5560 }
5561}
5562
5563/* Attempt to update this model for taking EDGE (where the last statement
5564 was LAST_STMT), returning true if the edge can be taken, false
5565 otherwise.
5566
5567 For CFG superedges where LAST_STMT is a conditional or a switch
5568 statement, attempt to add the relevant conditions for EDGE to this
5569 model, returning true if they are feasible, or false if they are
5570 impossible.
5571
5572 For call superedges, push frame information and store arguments
5573 into parameters.
5574
5575 For return superedges, pop frame information and store return
5576 values into any lhs.
5577
5578 Rejection of call/return superedges happens elsewhere, in
5579 program_point::on_edge (i.e. based on program point, rather
5580 than program state). */
5581
5582bool
5583region_model::maybe_update_for_edge (const superedge &edge,
5584 const gimple *last_stmt,
5585 region_model_context *ctxt)
5586{
5587 /* Handle frame updates for interprocedural edges. */
5588 switch (edge.m_kind)
5589 {
5590 default:
5591 break;
5592
5593 case SUPEREDGE_CALL:
5594 {
5595 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
5596 update_for_call_superedge (*call_edge, ctxt);
5597 }
5598 break;
5599
5600 case SUPEREDGE_RETURN:
5601 {
5602 const return_superedge *return_edge
5603 = as_a <const return_superedge *> (&edge);
5604 update_for_return_superedge (*return_edge, ctxt);
5605 }
5606 break;
5607
5608 case SUPEREDGE_INTRAPROCEDURAL_CALL:
5609 {
5610 const callgraph_superedge *cg_sedge
5611 = as_a <const callgraph_superedge *> (&edge);
5612 update_for_call_summary (*cg_sedge, ctxt);
5613 }
5614 break;
5615 }
5616
5617 if (last_stmt == NULL)
5618 return true;
5619
5620 /* Apply any constraints for conditionals/switch statements. */
5621
5622 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
5623 {
5624 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
5625 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt);
5626 }
5627
5628 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
5629 {
5630 const switch_cfg_superedge *switch_sedge
5631 = as_a <const switch_cfg_superedge *> (&edge);
5632 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, ctxt);
5633 }
5634
5635 return true;
5636}
5637
5638/* Push a new frame_region on to the stack region.
5639 Populate the frame_region with child regions for the function call's
5640 parameters, using values from the arguments at the callsite in the
5641 caller's frame. */
5642
5643void
5644region_model::update_for_call_superedge (const call_superedge &call_edge,
5645 region_model_context *ctxt)
5646{
5647 /* Build a vec of argument svalue_id, using the current top
5648 frame for resolving tree expressions. */
5649 const gcall *call_stmt = call_edge.get_call_stmt ();
5650 auto_vec<svalue_id> arg_sids (gimple_call_num_args (call_stmt));
5651
5652 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5653 {
5654 tree arg = gimple_call_arg (call_stmt, i);
5655 arg_sids.quick_push (get_rvalue (arg, ctxt));
5656 }
5657
5658 push_frame (call_edge.get_callee_function (), &arg_sids, ctxt);
5659}
5660
5661/* Pop the top-most frame_region from the stack, and store the svalue
5662 for any returned value into the region for the lvalue of the LHS of
5663 the call (if any). */
5664
5665void
5666region_model::update_for_return_superedge (const return_superedge &return_edge,
5667 region_model_context *ctxt)
5668{
5669 purge_stats stats;
5670 svalue_id result_sid = pop_frame (true, &stats, ctxt);
5671 // TODO: do something with the stats?
5672
5673 /* Set the result of the call, within the caller frame. */
5674 const gcall *call_stmt = return_edge.get_call_stmt ();
5675 tree lhs = gimple_call_lhs (call_stmt);
5676 if (lhs)
5677 set_value (get_lvalue (lhs, ctxt), result_sid, ctxt);
5678 else if (!result_sid.null_p ())
5679 {
5680 /* This could be a leak; try purging again, but this time,
5681 don't special-case the result_sid. */
5682 purge_stats stats;
5683 purge_unused_svalues (&stats, ctxt);
5684 }
5685}
5686
5687/* Update this region_model with a summary of the effect of calling
5688 and returning from CG_SEDGE.
5689
5690 TODO: Currently this is extremely simplistic: we merely set the
5691 return value to "unknown". A proper implementation would e.g. update
5692 sm-state, and presumably be reworked to support multiple outcomes. */
5693
5694void
5695region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
5696 region_model_context *ctxt)
5697{
5698 /* For now, set any return value to "unknown". */
5699 const gcall *call_stmt = cg_sedge.get_call_stmt ();
5700 tree lhs = gimple_call_lhs (call_stmt);
5701 if (lhs)
5702 set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt);
5703
5704 // TODO: actually implement some kind of summary here
5705}
5706
5707/* Given a true or false edge guarded by conditional statement COND_STMT,
5708 determine appropriate constraints for the edge to be taken.
5709
5710 If they are feasible, add the constraints and return true.
5711
5712 Return false if the constraints contradict existing knowledge
5713 (and so the edge should not be taken). */
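/* For example, for a hypothetical "if (x > 3)", the true edge adds the
   constraint "x > 3", whereas the false edge adds the inverted
   comparison "x <= 3".  */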
5714
5715bool
5716region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
5717 const gcond *cond_stmt,
5718 region_model_context *ctxt)
5719{
5720 ::edge cfg_edge = sedge.get_cfg_edge ();
5721 gcc_assert (cfg_edge != NULL);
5722 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5723
5724 enum tree_code op = gimple_cond_code (cond_stmt);
5725 tree lhs = gimple_cond_lhs (cond_stmt);
5726 tree rhs = gimple_cond_rhs (cond_stmt);
5727 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5728 op = invert_tree_comparison (op, false /* honor_nans */);
5729 return add_constraint (lhs, op, rhs, ctxt);
5730}
5731
5732/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5733 for the edge to be taken.
5734
5735 If they are feasible, add the constraints and return true.
5736
5737 Return false if the constraints contradict existing knowledge
5738 (and so the edge should not be taken). */
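/* For example, for a hypothetical
     switch (i)
       {
       case 1 ... 5: ...
       case 42:      ...
       default:      ...
       }
   the edge for "case 1 ... 5" adds "i >= 1" and "i <= 5", the edge for
   "case 42" adds "i == 42", and the default edge adds "i != 42" together
   with (for now) just the boundary exclusions "i != 1" and "i != 5" for
   the range case.  */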
5739
5740bool
5741region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
5742 const gswitch *switch_stmt,
5743 region_model_context *ctxt)
5744{
5745 tree index = gimple_switch_index (switch_stmt);
5746 tree case_label = edge.get_case_label ();
5747 gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR);
5748 tree lower_bound = CASE_LOW (case_label);
5749 tree upper_bound = CASE_HIGH (case_label);
5750 if (lower_bound)
5751 {
5752 if (upper_bound)
5753 {
5754 /* Range. */
5755 if (!add_constraint (index, GE_EXPR, lower_bound, ctxt))
5756 return false;
5757 return add_constraint (index, LE_EXPR, upper_bound, ctxt);
5758 }
5759 else
5760 /* Single-value. */
5761 return add_constraint (index, EQ_EXPR, lower_bound, ctxt);
5762 }
5763 else
5764 {
5765 /* The default case.
5766 Add exclusions based on the other cases. */
5767 for (unsigned other_idx = 1;
5768 other_idx < gimple_switch_num_labels (switch_stmt);
5769 other_idx++)
5770 {
5771 tree other_label = gimple_switch_label (switch_stmt,
5772 other_idx);
5773 tree other_lower_bound = CASE_LOW (other_label);
5774 tree other_upper_bound = CASE_HIGH (other_label);
5775 gcc_assert (other_lower_bound);
5776 if (other_upper_bound)
5777 {
5778 /* Exclude this range-valued case.
5779 For now, we just exclude the boundary values.
5780 TODO: exclude the values within the region. */
5781 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
5782 return false;
5783 if (!add_constraint (index, NE_EXPR, other_upper_bound, ctxt))
5784 return false;
5785 }
5786 else
5787 /* Exclude this single-valued case. */
5788 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
5789 return false;
5790 }
5791 return true;
5792 }
5793}
5794
5795/* Get the root_region within this model (guaranteed to be non-null). */
5796
5797root_region *
5798region_model::get_root_region () const
5799{
5800 return get_region<root_region> (m_root_rid);
5801}
5802
5803/* Get the region_id of this model's stack region (if any). */
5804
5805region_id
5806region_model::get_stack_region_id () const
5807{
5808 return get_root_region ()->get_stack_region_id ();
5809}
5810
5811/* Create a new frame_region for a call to FUN and push it onto
5812 the stack.
5813
5814 If ARG_SIDS is non-NULL, use it to populate the parameters
5815 in the new frame.
5816 Otherwise, populate them with unknown values.
5817
5818 Return the region_id of the new frame_region. */
5819
5820region_id
5821region_model::push_frame (function *fun, vec<svalue_id> *arg_sids,
5822 region_model_context *ctxt)
5823{
5824 return get_root_region ()->push_frame (this, fun, arg_sids, ctxt);
5825}
5826
5827/* Get the region_id of the top-most frame in this region_model's stack,
5828 if any. */
5829
5830region_id
5831region_model::get_current_frame_id () const
5832{
5833 return get_root_region ()->get_current_frame_id (*this);
5834}
5835
5836/* Get the function of the top-most frame in this region_model's stack.
5837 There must be such a frame. */
5838
5839function *
5840region_model::get_current_function () const
5841{
5842 region_id frame_id = get_current_frame_id ();
5843 frame_region *frame = get_region<frame_region> (frame_id);
5844 return frame->get_function ();
5845}
5846
5847/* Pop the topmost frame_region from this region_model's stack;
5848 see the comment for stack_region::pop_frame. */
5849
5850svalue_id
5851region_model::pop_frame (bool purge, purge_stats *out,
5852 region_model_context *ctxt)
5853{
5854 return get_root_region ()->pop_frame (this, purge, out, ctxt);
5855}
5856
5857/* Get the number of frames in this region_model's stack. */
5858
5859int
5860region_model::get_stack_depth () const
5861{
5862 stack_region *stack = get_root_region ()->get_stack_region (this);
5863 if (stack)
5864 return stack->get_num_frames ();
5865 else
5866 return 0;
5867}
5868
5869/* Get the function * at DEPTH within the call stack. */
5870
5871function *
5872region_model::get_function_at_depth (unsigned depth) const
5873{
5874 stack_region *stack = get_root_region ()->get_stack_region (this);
5875 gcc_assert (stack);
5876 region_id frame_rid = stack->get_frame_rid (depth);
5877 frame_region *frame = get_region <frame_region> (frame_rid);
5878 return frame->get_function ();
5879}
5880
5881/* Get the region_id of this model's globals region (if any). */
5882
5883region_id
5884region_model::get_globals_region_id () const
5885{
5886 return get_root_region ()->get_globals_region_id ();
5887}
5888
5889/* Add SVAL to this model, taking ownership, and returning its new
5890 svalue_id. */
5891
5892svalue_id
5893region_model::add_svalue (svalue *sval)
5894{
5895 gcc_assert (sval);
5896 m_svalues.safe_push (sval);
5897 return svalue_id::from_int (m_svalues.length () - 1);
5898}
5899
5900/* Change the meaning of SID to be NEW_SVAL
 5901    (e.g. when dereferencing an unknown pointer, the pointer
5902 becomes a pointer to a symbolic region, so that all users
5903 of the former unknown pointer are now effectively pointing
5904 at the same region). */
5905
5906void
5907region_model::replace_svalue (svalue_id sid, svalue *new_sval)
5908{
5909 gcc_assert (!sid.null_p ());
5910 int idx = sid.as_int ();
5911
5912 gcc_assert (m_svalues[idx]);
5913 gcc_assert (m_svalues[idx]->get_type () == new_sval->get_type ());
5914 delete m_svalues[idx];
5915
5916 m_svalues[idx] = new_sval;
5917}
5918
5919/* Add region R to this model, taking ownership, and returning its new
5920 region_id. */
5921
5922region_id
5923region_model::add_region (region *r)
5924{
5925 gcc_assert (r);
5926 m_regions.safe_push (r);
5927 return region_id::from_int (m_regions.length () - 1);
5928}
5929
5930/* Return the svalue with id SVAL_ID, or NULL for a null id. */
5931
5932svalue *
5933region_model::get_svalue (svalue_id sval_id) const
5934{
5935 if (sval_id.null_p ())
5936 return NULL;
5937 return m_svalues[sval_id.as_int ()];
5938}
5939
5940/* Return the region with id RID, or NULL for a null id. */
5941
5942region *
5943region_model::get_region (region_id rid) const
5944{
5945 if (rid.null_p ())
5946 return NULL;
5947 return m_regions[rid.as_int ()];
5948}
5949
5950/* Make a region of an appropriate subclass for TYPE,
5951 with parent PARENT_RID. */
5952
5953static region *
5954make_region_for_type (region_id parent_rid, tree type)
5955{
5956 gcc_assert (TYPE_P (type));
5957
5958 if (INTEGRAL_TYPE_P (type)
5959 || SCALAR_FLOAT_TYPE_P (type)
5960 || POINTER_TYPE_P (type)
5961 || TREE_CODE (type) == COMPLEX_TYPE)
5962 return new primitive_region (parent_rid, type);
5963
5964 if (TREE_CODE (type) == RECORD_TYPE)
5965 return new struct_region (parent_rid, type);
5966
5967 if (TREE_CODE (type) == ARRAY_TYPE)
5968 return new array_region (parent_rid, type);
5969
5970 if (TREE_CODE (type) == UNION_TYPE)
5971 return new union_region (parent_rid, type);
5972
 5973   if (FUNC_OR_METHOD_TYPE_P (type))
 5974     return new function_region (parent_rid, type);
5975
5976 /* If we have a void *, make a new symbolic region. */
5977 if (type == void_type_node)
5978 return new symbolic_region (parent_rid, false);
5979
5980 gcc_unreachable ();
5981}
5982
5983/* Add a region with type TYPE and parent PARENT_RID. */
5984
5985region_id
5986region_model::add_region_for_type (region_id parent_rid, tree type)
5987{
5988 gcc_assert (TYPE_P (type));
5989
5990 region *new_region = make_region_for_type (parent_rid, type);
5991 return add_region (new_region);
5992}
5993
5994/* Helper class for region_model::purge_unused_svalues. */
5995
5996class restrict_to_used_svalues : public purge_criteria
5997{
5998public:
5999 restrict_to_used_svalues (const auto_sbitmap &used) : m_used (used) {}
6000
6001 bool should_purge_p (svalue_id sid) const FINAL OVERRIDE
6002 {
6003 gcc_assert (!sid.null_p ());
6004 return !bitmap_bit_p (m_used, sid.as_int ());
6005 }
6006
6007private:
6008 const auto_sbitmap &m_used;
6009};
6010
6011/* Remove unused svalues from this model, accumulating stats into STATS.
6012 Unused svalues are deleted. Doing so could reorder the svalues, and
6013 thus change the meaning of svalue_ids.
6014
6015 If CTXT is non-NULL, then it is notified about svalue_id remappings,
6016 and about svalue_ids that are about to be deleted. This allows e.g.
6017 for warning about resource leaks, for the case where the svalue
6018 represents a resource handle in the user code (e.g. a FILE * or a malloc
6019 buffer).
6020
6021 Amongst other things, removing unused svalues is important for ensuring
6022 that the analysis of loops terminates. Otherwise, we could generate a
6023 succession of models with unreferenced "unknown" values, where the
6024 number of redundant unknown values could grow without bounds, and each
6025 such model would be treated as distinct.
6026
6027 If KNOWN_USED is non-NULL, treat *KNOWN_USED as used (this is for
6028 handling values being returned from functions as their frame is popped,
6029 since otherwise we'd have to simultaneously determine both the rvalue
6030 of the return expr in the callee frame and the lvalue for the gcall's
6031 assignment in the caller frame, and it seems cleaner to express all
6032 lvalue and rvalue lookups implicitly relative to a "current" frame). */
6033
6034void
6035region_model::purge_unused_svalues (purge_stats *stats,
6036 region_model_context *ctxt,
6037 svalue_id *known_used_sid)
6038{
6039 // TODO: might want to avoid a vfunc call just to do logging here:
6040 logger *logger = ctxt ? ctxt->get_logger () : NULL;
6041
6042 LOG_SCOPE (logger);
6043
6044 auto_sbitmap used (m_svalues.length ());
6045 bitmap_clear (used);
6046
6047 if (known_used_sid)
6048 if (!known_used_sid->null_p ())
6049 bitmap_set_bit (used, known_used_sid->as_int ());
6050
6051 /* Walk the regions, marking sids that are used. */
6052 unsigned i;
6053 region *r;
6054 FOR_EACH_VEC_ELT (m_regions, i, r)
6055 {
6056 svalue_id sid = r->get_value_direct ();
6057 if (!sid.null_p ())
6058 bitmap_set_bit (used, sid.as_int ());
6059 }
6060
6061 /* Now purge any constraints involving svalues we don't care about. */
6062 restrict_to_used_svalues criterion (used);
6063 m_constraints->purge (criterion, stats);
6064
6065 /* Mark any sids that are in constraints that survived. */
6066 {
6067 equiv_class *ec;
6068 FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec)
6069 {
6070 int j;
6071 svalue_id *sid;
6072 FOR_EACH_VEC_ELT (ec->m_vars, j, sid)
6073 {
6074 gcc_assert (!sid->null_p ());
6075 bitmap_set_bit (used, sid->as_int ());
6076 }
6077 }
6078 }
6079
6080 /* Build a mapping from old-sid to new-sid so that we can preserve
6081 order of the used IDs and move all redundant ones to the end.
 6082      Iterate through svalue IDs, adding used ones to the front of
6083 the new list, and unused ones to the back. */
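  /* For example, with five svalues sv0..sv4 of which sv1 and sv3 are
     unused, the used ones keep their relative order at the front
     (sv0->sv0, sv2->sv1, sv4->sv2) and the unused ones are pushed to the
     end (sv1->sv4, sv3->sv3), ready to be popped off below.  */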
6084 svalue_id_map map (m_svalues.length ());
6085 int next_used_new_sid = 0;
6086 int after_next_unused_new_sid = m_svalues.length ();
6087 for (unsigned i = 0; i < m_svalues.length (); i++)
6088 {
6089 svalue_id src (svalue_id::from_int (i));
6090 if (bitmap_bit_p (used, i))
6091 {
6092 if (logger)
6093 logger->log ("sv%i is used", i);
6094 map.put (src, svalue_id::from_int (next_used_new_sid++));
6095 }
6096 else
6097 {
6098 if (logger)
6099 logger->log ("sv%i is unused", i);
6100 map.put (src, svalue_id::from_int (--after_next_unused_new_sid));
6101 }
6102 }
6103 /* The two insertion points should have met. */
6104 gcc_assert (next_used_new_sid == after_next_unused_new_sid);
6105
6106 /* Now walk the regions and the constraints, remapping sids,
6107 so that all the redundant svalues are at the end. */
6108 remap_svalue_ids (map);
6109
6110 if (logger)
6111 {
6112 logger->start_log_line ();
6113 logger->log_partial ("map: ");
6114 map.dump_to_pp (logger->get_printer ());
6115 logger->end_log_line ();
6116 }
6117
6118 /* Notify any client about the remapping and pending deletion.
6119 Potentially this could trigger leak warnings. */
6120 if (ctxt)
6121 {
6122 ctxt->remap_svalue_ids (map);
6123 int num_client_items_purged
6124 = ctxt->on_svalue_purge (svalue_id::from_int (next_used_new_sid), map);
6125 if (stats)
6126 stats->m_num_client_items += num_client_items_purged;
6127 }
6128
6129 /* Drop the redundant svalues from the end of the vector. */
6130 while ((signed)m_svalues.length () > next_used_new_sid)
6131 {
6132 if (logger)
6133 {
6134 svalue_id victim = svalue_id::from_int (m_svalues.length () - 1);
6135 logger->log ("deleting sv%i (was sv%i)",
6136 victim.as_int (),
6137 map.get_src_for_dst (victim).as_int ());
6138 }
6139 delete m_svalues.pop ();
6140 if (stats)
6141 stats->m_num_svalues++;
6142 }
6143
6144 if (known_used_sid)
6145 map.update (known_used_sid);
6146
6147 validate ();
6148}
6149
6150/* Renumber the svalues within this model according to MAP. */
6151
6152void
6153region_model::remap_svalue_ids (const svalue_id_map &map)
6154{
6155 /* Update IDs within regions. */
6156 unsigned i;
6157 region *r;
6158 FOR_EACH_VEC_ELT (m_regions, i, r)
6159 r->remap_svalue_ids (map);
6160
6161 /* Update IDs within ECs within constraints. */
6162 m_constraints->remap_svalue_ids (map);
6163
6164 /* Build a reordered svalues vector. */
6165 auto_vec<svalue *> new_svalues (m_svalues.length ());
6166 for (unsigned i = 0; i < m_svalues.length (); i++)
6167 {
6168 svalue_id dst (svalue_id::from_int (i));
6169 svalue_id src = map.get_src_for_dst (dst);
6170 new_svalues.quick_push (get_svalue (src));
6171 }
6172
6173 /* Copy over the reordered vec to m_svalues. */
6174 m_svalues.truncate (0);
6175 gcc_assert (m_svalues.space (new_svalues.length ()));
6176 svalue *sval;
6177 FOR_EACH_VEC_ELT (new_svalues, i, sval)
6178 m_svalues.quick_push (sval);
6179}
6180
6181/* Renumber the regions within this model according to MAP. */
6182
6183void
6184region_model::remap_region_ids (const region_id_map &map)
6185{
6186 /* Update IDs within regions. */
6187 unsigned i;
6188 region *r;
6189 FOR_EACH_VEC_ELT (m_regions, i, r)
6190 r->remap_region_ids (map);
6191
6192 /* Update IDs within svalues. */
6193 svalue *sval;
6194 FOR_EACH_VEC_ELT (m_svalues, i, sval)
6195 sval->remap_region_ids (map);
6196
6197 /* Build a reordered regions vector. */
6198 auto_vec<region *> new_regions (m_regions.length ());
6199 for (unsigned i = 0; i < m_regions.length (); i++)
6200 {
6201 region_id dst (region_id::from_int (i));
6202 region_id src = map.get_src_for_dst (dst);
6203 new_regions.quick_push (get_region (src));
6204 }
6205
6206 /* Copy over the reordered vec to m_regions. */
6207 m_regions.truncate (0);
6208 gcc_assert (m_regions.space (new_regions.length ()));
6209 FOR_EACH_VEC_ELT (new_regions, i, r)
6210 m_regions.quick_push (r);
6211}
6212
6213/* Delete all regions within SET_TO_PURGE, remapping region IDs for
6214 other regions. It's required that there are no uses of the
6215 regions within the set (or the region IDs will become invalid).
6216
6217 Accumulate stats to STATS. */
6218
6219void
6220region_model::purge_regions (const region_id_set &set_to_purge,
6221 purge_stats *stats,
6222 logger *)
6223{
6224 /* Build a mapping from old-rid to new-rid so that we can preserve
6225 order of the used IDs and move all redundant ones to the end.
 6226      Iterate through region IDs, adding used ones to the front of
6227 the new list, and unused ones to the back. */
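  /* This is the same two-cursor scheme used for svalue IDs in
     region_model::purge_unused_svalues above.  */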
6228 region_id_map map (m_regions.length ());
6229 int next_used_new_rid = 0;
6230 int after_next_unused_new_rid = m_regions.length ();
6231 for (unsigned i = 0; i < m_regions.length (); i++)
6232 {
6233 region_id src (region_id::from_int (i));
6234 if (set_to_purge.region_p (src))
6235 map.put (src, region_id::from_int (--after_next_unused_new_rid));
6236 else
6237 map.put (src, region_id::from_int (next_used_new_rid++));
6238 }
6239 /* The two insertion points should have met. */
6240 gcc_assert (next_used_new_rid == after_next_unused_new_rid);
6241
6242 /* Now walk the regions and svalues, remapping rids,
6243 so that all the redundant regions are at the end. */
6244 remap_region_ids (map);
6245
6246 /* Drop the redundant regions from the end of the vector. */
6247 while ((signed)m_regions.length () > next_used_new_rid)
6248 {
6249 delete m_regions.pop ();
6250 if (stats)
6251 stats->m_num_regions++;
6252 }
6253}
6254
6255/* Populate *OUT with RID and all of its descendents.
6256 If EXCLUDE_RID is non-null, then don't add it or its descendents. */
6257
6258void
6259region_model::get_descendents (region_id rid, region_id_set *out,
6260 region_id exclude_rid) const
6261{
6262 out->add_region (rid);
6263
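  /* The m_regions vec need not be ordered parent-before-child, so iterate
     to a fixed point: each pass adds any region whose parent has already
     been added to *OUT.  */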
6264 bool changed = true;
6265 while (changed)
6266 {
6267 changed = false;
6268 unsigned i;
6269 region *r;
6270 FOR_EACH_VEC_ELT (m_regions, i, r)
6271 {
6272 region_id iter_rid = region_id::from_int (i);
6273 if (iter_rid == exclude_rid)
6274 continue;
6275 if (!out->region_p (iter_rid))
6276 {
6277 region_id parent_rid = r->get_parent ();
6278 if (!parent_rid.null_p ())
6279 if (out->region_p (parent_rid))
6280 {
6281 out->add_region (iter_rid);
6282 changed = true;
6283 }
6284 }
6285 }
6286 }
6287}
6288
6289/* Delete RID and all descendent regions.
 6290    Find any pointers to such regions; convert them to
6291 poisoned values of kind PKIND.
6292 Accumulate stats on purged entities into STATS. */
6293
6294void
6295region_model::delete_region_and_descendents (region_id rid,
6296 enum poison_kind pkind,
6297 purge_stats *stats,
6298 logger *logger)
6299{
6300 /* Find all child and descendent regions. */
6301 region_id_set descendents (this);
6302 get_descendents (rid, &descendents, region_id::null ());
6303
6304 /* Find any pointers to such regions; convert to poisoned. */
6305 poison_any_pointers_to_bad_regions (descendents, pkind);
6306
6307 /* Delete all such regions. */
6308 purge_regions (descendents, stats, logger);
6309}
6310
6311/* Find any pointers to regions within BAD_REGIONS; convert them to
6312 poisoned values of kind PKIND. */
6313
6314void
6315region_model::poison_any_pointers_to_bad_regions (const region_id_set &
6316 bad_regions,
6317 enum poison_kind pkind)
6318{
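  /* For example, when a stack frame is popped, any pointer to a local
     within that frame is replaced with a poisoned value of kind
     POISON_KIND_POPPED_STACK; see test_stack_frames below.  */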
6319 int i;
6320 svalue *sval;
6321 FOR_EACH_VEC_ELT (m_svalues, i, sval)
6322 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
6323 {
6324 region_id ptr_dst = ptr_sval->get_pointee ();
6325 if (!ptr_dst.null_p ())
6326 if (bad_regions.region_p (ptr_dst))
6327 replace_svalue
6328 (svalue_id::from_int (i),
6329 new poisoned_svalue (pkind, sval->get_type ()));
6330 }
6331}
6332
6333/* Attempt to merge THIS with OTHER_MODEL, writing the result
6334 to OUT_MODEL, and populating SID_MAPPING. */
6335
6336bool
6337region_model::can_merge_with_p (const region_model &other_model,
6338 region_model *out_model,
6339 svalue_id_merger_mapping *sid_mapping) const
6340{
6341 gcc_assert (m_root_rid == other_model.m_root_rid);
6342 gcc_assert (m_root_rid.as_int () == 0);
6343 gcc_assert (sid_mapping);
6344 gcc_assert (out_model);
6345
6346 model_merger merger (this, &other_model, out_model, sid_mapping);
6347
6348 if (!root_region::can_merge_p (get_root_region (),
6349 other_model.get_root_region (),
6350 out_model->get_root_region (),
6351 &merger))
6352 return false;
6353
6354 /* Merge constraints. */
6355 constraint_manager::merge (*m_constraints,
6356 *other_model.m_constraints,
6357 out_model->m_constraints,
6358 merger);
6359
6360 out_model->validate ();
6361
 6362   /* The merged model should be simpler than (or as simple as) the inputs.  */
6363#if 0
6364 gcc_assert (out_model->m_svalues.length () <= m_svalues.length ());
6365 gcc_assert (out_model->m_svalues.length ()
6366 <= other_model.m_svalues.length ());
6367#endif
6368 gcc_assert (out_model->m_regions.length () <= m_regions.length ());
6369 gcc_assert (out_model->m_regions.length ()
6370 <= other_model.m_regions.length ());
6371 // TODO: same, for constraints
6372
6373 return true;
6374}
6375
6376/* As above, but supply a placeholder svalue_id_merger_mapping
6377 instance to be used and receive output. For use in selftests. */
6378
6379bool
6380region_model::can_merge_with_p (const region_model &other_model,
6381 region_model *out_model) const
6382{
6383 svalue_id_merger_mapping sid_mapping (*this, other_model);
6384 return can_merge_with_p (other_model, out_model, &sid_mapping);
6385}
6386
6387/* For debugging purposes: look for a region within this region_model
6388 for a decl named NAME (or an SSA_NAME for such a decl),
6389 returning its value, or svalue_id::null if none are found. */
6390
6391svalue_id
6392region_model::get_value_by_name (const char *name) const
6393{
6394 gcc_assert (name);
6395 tree identifier = get_identifier (name);
6396 return get_root_region ()->get_value_by_name (identifier, *this);
6397}
6398
 6399 /* Generate or reuse an svalue_id within this model for an index
 6400    into an array of the type pointed to by PTR_TYPE, based on OFFSET_SID.  */
6401
6402svalue_id
6403region_model::convert_byte_offset_to_array_index (tree ptr_type,
6404 svalue_id offset_sid)
6405{
6406 gcc_assert (POINTER_TYPE_P (ptr_type));
6407
6408 if (tree offset_cst = maybe_get_constant (offset_sid))
6409 {
6410 tree elem_type = TREE_TYPE (ptr_type);
6411
6412 /* Arithmetic on void-pointers is a GNU C extension, treating the size
6413 of a void as 1.
6414 https://gcc.gnu.org/onlinedocs/gcc/Pointer-Arith.html
6415
6416 Returning early for this case avoids a diagnostic from within the
6417 call to size_in_bytes. */
6418 if (TREE_CODE (elem_type) == VOID_TYPE)
6419 return offset_sid;
6420
6421 /* This might not be a constant. */
6422 tree byte_size = size_in_bytes (elem_type);
6423
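      /* E.g. on a target where "int" is 4 bytes, a byte offset of 12 into
	 an "int *" corresponds to array index 3.  */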
 6424       /* Try to get a constant by dividing, ensuring that we're in a
 6425	  signed representation first.  */
 6426       tree index
 6427	 = fold_binary (TRUNC_DIV_EXPR, ssizetype,
 6428			fold_convert (ssizetype, offset_cst),
 6429			fold_convert (ssizetype, byte_size));
 6430       if (index && TREE_CODE (index) == INTEGER_CST)
 6431	 return get_or_create_constant_svalue (index);
6432 }
6433
6434 /* Otherwise, we don't know the array index; generate a new unknown value.
6435 TODO: do we need to capture the relationship between two unknown
6436 values (the offset and the index)? */
6437 return add_svalue (new unknown_svalue (integer_type_node));
6438}
6439
6440/* Get a region of type TYPE for PTR_SID[OFFSET_SID/sizeof (*PTR_SID)].
6441
6442 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6443 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6444 and then get a view of type TYPE on the relevant array element. */
6445
6446region_id
6447region_model::get_or_create_mem_ref (tree type,
6448 svalue_id ptr_sid,
6449 svalue_id offset_sid,
6450 region_model_context *ctxt)
6451{
6452 svalue *ptr_sval = get_svalue (ptr_sid);
6453 tree ptr_type = ptr_sval->get_type ();
6454 gcc_assert (ptr_type);
6455
6456 region_id raw_rid = deref_rvalue (ptr_sid, ctxt);
6457
6458 svalue *offset_sval = get_svalue (offset_sid);
6459 tree offset_type = offset_sval->get_type ();
6460 gcc_assert (offset_type);
6461
6462 if (constant_svalue *cst_sval = offset_sval->dyn_cast_constant_svalue ())
6463 {
6464 if (zerop (cst_sval->get_constant ()))
6465 {
6466 /* Handle the zero offset case. */
6467 return get_or_create_view (raw_rid, type);
6468 }
6469
6470 /* If we're already within an array of the correct type,
6471 then we want to reuse that array, rather than starting
6472 a new view.
6473 If so, figure out our raw_rid's offset from its parent,
6474 if we can, and use that to offset OFFSET_SID, and create
6475 the element within the parent region. */
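      /* For example, if PTR_SID points at element 2 of "int arr[10]" and
	 OFFSET_SID corresponds to array index 3, we want the element of
	 "arr" at index 2 + 3 == 5, rather than imposing a fresh array view
	 on the pointed-to element.  */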
6476 region *raw_reg = get_region (raw_rid);
6477 region_id parent_rid = raw_reg->get_parent ();
6478 tree parent_type = get_region (parent_rid)->get_type ();
6479 if (parent_type
6480 && TREE_CODE (parent_type) == ARRAY_TYPE)
6481 {
6482 // TODO: check we have the correct parent type
6483 array_region *parent_array = get_region <array_region> (parent_rid);
6484 array_region::key_t key_for_raw_rid;
6485 if (parent_array->get_key_for_child_region (raw_rid,
6486 &key_for_raw_rid))
6487 {
6488 /* Convert from offset to index. */
6489 svalue_id index_sid
6490 = convert_byte_offset_to_array_index (ptr_type, offset_sid);
6491 if (tree index_cst
6492 = get_svalue (index_sid)->maybe_get_constant ())
6493 {
6494 array_region::key_t index_offset
6495 = array_region::key_from_constant (index_cst);
6496 array_region::key_t index_rel_to_parent
6497 = key_for_raw_rid + index_offset;
6498 tree index_rel_to_parent_cst
6499 = wide_int_to_tree (integer_type_node,
6500 index_rel_to_parent);
6501 svalue_id index_sid
6502 = get_or_create_constant_svalue (index_rel_to_parent_cst);
6503
6504 /* Carry on, using the parent region and adjusted index. */
6505 region_id element_rid
6506 = parent_array->get_element (this, raw_rid, index_sid,
6507 ctxt);
6508 return get_or_create_view (element_rid, type);
6509 }
6510 }
6511 }
6512 }
6513
6514 tree array_type = build_array_type (TREE_TYPE (ptr_type),
6515 integer_type_node);
6516 region_id array_view_rid = get_or_create_view (raw_rid, array_type);
6517 array_region *array_reg = get_region <array_region> (array_view_rid);
6518
6519 svalue_id index_sid
6520 = convert_byte_offset_to_array_index (ptr_type, offset_sid);
6521
6522 region_id element_rid
6523 = array_reg->get_element (this, array_view_rid, index_sid, ctxt);
6524
6525 return get_or_create_view (element_rid, type);
6526}
6527
6528/* Get a region of type TYPE for PTR_SID + OFFSET_SID.
6529
6530 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6531 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6532 and then get a view of type TYPE on the relevant array element. */
6533
6534region_id
6535region_model::get_or_create_pointer_plus_expr (tree type,
6536 svalue_id ptr_sid,
6537 svalue_id offset_in_bytes_sid,
6538 region_model_context *ctxt)
6539{
6540 return get_or_create_mem_ref (type,
6541 ptr_sid,
6542 offset_in_bytes_sid,
6543 ctxt);
6544}
6545
 6546 /* Get or create a view of type TYPE of the region with id RAW_RID.
 6547    Return the id of the view (or RAW_RID if it is already of that type).  */
6548
6549region_id
6550region_model::get_or_create_view (region_id raw_rid, tree type)
6551{
6552 region *raw_region = get_region (raw_rid);
6553
6554 gcc_assert (TYPE_P (type));
6555 if (type != raw_region->get_type ())
6556 {
6557 /* If the region already has a view of the requested type,
6558 reuse it. */
6559 region_id existing_view_rid = raw_region->get_view (type, this);
6560 if (!existing_view_rid.null_p ())
6561 return existing_view_rid;
6562
6563 /* Otherwise, make one (adding it to the region_model and
6564 to the viewed region). */
6565 region_id view_rid = add_region_for_type (raw_rid, type);
6566 raw_region->add_view (view_rid, this);
6567 // TODO: something to signify that this is a "view"
6568 return view_rid;
6569 }
6570
6571 return raw_rid;
6572}
6573
6574/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6575 otherwise. */
6576
6577tree
6578region_model::get_fndecl_for_call (const gcall *call,
6579 region_model_context *ctxt)
6580{
6581 tree fn_ptr = gimple_call_fn (call);
6582 if (fn_ptr == NULL_TREE)
6583 return NULL_TREE;
6584 svalue_id fn_ptr_sid = get_rvalue (fn_ptr, ctxt);
6585 svalue *fn_ptr_sval = get_svalue (fn_ptr_sid);
6586 if (region_svalue *fn_ptr_ptr = fn_ptr_sval->dyn_cast_region_svalue ())
6587 {
6588 region_id fn_rid = fn_ptr_ptr->get_pointee ();
6589 code_region *code = get_root_region ()->get_code_region (this);
6590 if (code)
6591 {
6592 tree fn_decl = code->get_tree_for_child_region (fn_rid);
6593 return fn_decl;
6594 }
6595 }
6596
6597 return NULL_TREE;
6598}
6599
6600/* struct model_merger. */
6601
6602/* Dump a multiline representation of this merger to PP. */
6603
6604void
6605model_merger::dump_to_pp (pretty_printer *pp) const
6606{
6607 pp_string (pp, "model A:");
6608 pp_newline (pp);
6609 m_model_a->dump_to_pp (pp, false);
6610 pp_newline (pp);
6611
6612 pp_string (pp, "model B:");
6613 pp_newline (pp);
6614 m_model_b->dump_to_pp (pp, false);
6615 pp_newline (pp);
6616
6617 pp_string (pp, "merged model:");
6618 pp_newline (pp);
6619 m_merged_model->dump_to_pp (pp, false);
6620 pp_newline (pp);
6621
6622 pp_string (pp, "region map: model A to merged model:");
6623 pp_newline (pp);
6624 m_map_regions_from_a_to_m.dump_to_pp (pp);
6625 pp_newline (pp);
6626
6627 pp_string (pp, "region map: model B to merged model:");
6628 pp_newline (pp);
6629 m_map_regions_from_b_to_m.dump_to_pp (pp);
6630 pp_newline (pp);
6631
6632 m_sid_mapping->dump_to_pp (pp);
6633}
6634
6635/* Dump a multiline representation of this merger to FILE. */
6636
6637void
6638model_merger::dump (FILE *fp) const
6639{
6640 pretty_printer pp;
6641 pp_format_decoder (&pp) = default_tree_printer;
6642 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6643 pp.buffer->stream = fp;
6644 dump_to_pp (&pp);
6645 pp_flush (&pp);
6646}
6647
6648/* Dump a multiline representation of this merger to stderr. */
6649
6650DEBUG_FUNCTION void
6651model_merger::dump () const
6652{
6653 dump (stderr);
6654}
6655
6656/* Attempt to merge the svalues of SID_A and SID_B (from their
6657 respective models), writing the id of the resulting svalue
6658 into *MERGED_SID.
6659 Return true if the merger is possible, false otherwise. */
6660
6661bool
6662model_merger::can_merge_values_p (svalue_id sid_a,
6663 svalue_id sid_b,
6664 svalue_id *merged_sid)
6665{
6666 gcc_assert (merged_sid);
6667 svalue *sval_a = m_model_a->get_svalue (sid_a);
6668 svalue *sval_b = m_model_b->get_svalue (sid_b);
6669
6670 /* If both are NULL, then the "values" are trivially mergeable. */
6671 if (!sval_a && !sval_b)
6672 return true;
6673
6674 /* If one is NULL and the other non-NULL, then the "values"
6675 are not mergeable. */
6676 if (!(sval_a && sval_b))
6677 return false;
6678
6679 /* Have they both already been mapped to the same new svalue_id?
6680 If so, use it. */
6681 svalue_id sid_a_in_m
6682 = m_sid_mapping->m_map_from_a_to_m.get_dst_for_src (sid_a);
6683 svalue_id sid_b_in_m
6684 = m_sid_mapping->m_map_from_b_to_m.get_dst_for_src (sid_b);
6685 if (!sid_a_in_m.null_p ()
6686 && !sid_b_in_m.null_p ()
6687 && sid_a_in_m == sid_b_in_m)
6688 {
6689 *merged_sid = sid_a_in_m;
6690 return true;
6691 }
6692
6693 tree type = sval_a->get_type ();
6694 if (type == NULL_TREE)
6695 type = sval_b->get_type ();
6696
6697 /* If the values have different kinds, or are both unknown,
6698 then merge as "unknown". */
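  /* For example, merging a constant with an unknown value, or merging two
     unknown values, yields a fresh unknown_svalue.  */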
6699 if (sval_a->get_kind () != sval_b->get_kind ()
6700 || sval_a->get_kind () == SK_UNKNOWN)
6701 {
6702 svalue *merged_sval = new unknown_svalue (type);
6703 *merged_sid = m_merged_model->add_svalue (merged_sval);
6704 record_svalues (sid_a, sid_b, *merged_sid);
6705 return true;
6706 }
6707
6708 gcc_assert (sval_a->get_kind () == sval_b->get_kind ());
6709
6710 switch (sval_a->get_kind ())
6711 {
6712 default:
6713 case SK_UNKNOWN: /* SK_UNKNOWN handled above. */
6714 gcc_unreachable ();
6715
6716 case SK_REGION:
6717 {
6718 /* If we have two region pointers, then we can merge (possibly to
6719 "unknown"). */
6720 const region_svalue &region_sval_a = *as_a <region_svalue *> (sval_a);
6721 const region_svalue &region_sval_b = *as_a <region_svalue *> (sval_b);
6722 region_svalue::merge_values (region_sval_a, region_sval_b,
6723 merged_sid, type,
6724 this);
6725 record_svalues (sid_a, sid_b, *merged_sid);
6726 return true;
6727 }
6728 break;
6729 case SK_CONSTANT:
6730 {
6731 /* If we have two constants, then we can merge. */
6732 const constant_svalue &cst_sval_a = *as_a <constant_svalue *> (sval_a);
6733 const constant_svalue &cst_sval_b = *as_a <constant_svalue *> (sval_b);
6734 constant_svalue::merge_values (cst_sval_a, cst_sval_b,
6735 merged_sid, this);
6736 record_svalues (sid_a, sid_b, *merged_sid);
6737 return true;
6738 }
6739 break;
6740
6741 case SK_POISONED:
6742 case SK_SETJMP:
6743 return false;
6744 }
6745}
6746
6747/* Record that A_RID in model A and B_RID in model B
6748 correspond to MERGED_RID in the merged model, so
6749 that pointers can be accurately merged. */
6750
6751void
6752model_merger::record_regions (region_id a_rid,
6753 region_id b_rid,
6754 region_id merged_rid)
6755{
6756 m_map_regions_from_a_to_m.put (a_rid, merged_rid);
6757 m_map_regions_from_b_to_m.put (b_rid, merged_rid);
6758}
6759
6760/* Record that A_SID in model A and B_SID in model B
6761 correspond to MERGED_SID in the merged model. */
6762
6763void
6764model_merger::record_svalues (svalue_id a_sid,
6765 svalue_id b_sid,
6766 svalue_id merged_sid)
6767{
6768 gcc_assert (m_sid_mapping);
6769 m_sid_mapping->m_map_from_a_to_m.put (a_sid, merged_sid);
6770 m_sid_mapping->m_map_from_b_to_m.put (b_sid, merged_sid);
6771}
6772
6773/* struct svalue_id_merger_mapping. */
6774
6775/* svalue_id_merger_mapping's ctor. */
6776
6777svalue_id_merger_mapping::svalue_id_merger_mapping (const region_model &a,
6778 const region_model &b)
6779: m_map_from_a_to_m (a.get_num_svalues ()),
6780 m_map_from_b_to_m (b.get_num_svalues ())
6781{
6782}
6783
6784/* Dump a multiline representation of this to PP. */
6785
6786void
6787svalue_id_merger_mapping::dump_to_pp (pretty_printer *pp) const
6788{
6789 pp_string (pp, "svalue_id map: model A to merged model:");
6790 pp_newline (pp);
6791 m_map_from_a_to_m.dump_to_pp (pp);
6792 pp_newline (pp);
6793
6794 pp_string (pp, "svalue_id map: model B to merged model:");
6795 pp_newline (pp);
6796 m_map_from_b_to_m.dump_to_pp (pp);
6797 pp_newline (pp);
6798}
6799
6800/* Dump a multiline representation of this to FILE. */
6801
6802void
6803svalue_id_merger_mapping::dump (FILE *fp) const
6804{
6805 pretty_printer pp;
6806 pp_format_decoder (&pp) = default_tree_printer;
6807 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6808 pp.buffer->stream = fp;
6809 dump_to_pp (&pp);
6810 pp_flush (&pp);
6811}
6812
6813/* Dump a multiline representation of this to stderr. */
6814
6815DEBUG_FUNCTION void
6816svalue_id_merger_mapping::dump () const
6817{
6818 dump (stderr);
6819}
6820
6821/* struct canonicalization. */
6822
6823/* canonicalization's ctor. */
6824
6825canonicalization::canonicalization (const region_model &model)
6826: m_model (model),
6827 m_rid_map (model.get_num_regions ()),
6828 m_sid_map (model.get_num_svalues ()),
6829 m_next_rid_int (0),
6830 m_next_sid_int (0)
6831{
6832}
6833
6834/* If we've not seen RID yet, assign it a canonicalized region_id,
6835 and walk the region's svalue and then the region. */
6836
6837void
6838canonicalization::walk_rid (region_id rid)
6839{
6840 /* Stop if we've already seen RID. */
6841 if (!m_rid_map.get_dst_for_src (rid).null_p ())
6842 return;
6843
6844 region *region = m_model.get_region (rid);
6845 if (region)
6846 {
6847 m_rid_map.put (rid, region_id::from_int (m_next_rid_int++));
6848 walk_sid (region->get_value_direct ());
6849 region->walk_for_canonicalization (this);
6850 }
6851}
6852
6853/* If we've not seen SID yet, assign it a canonicalized svalue_id,
6854 and walk the svalue (and potentially regions e.g. for ptr values). */
6855
6856void
6857canonicalization::walk_sid (svalue_id sid)
6858{
6859 /* Stop if we've already seen SID. */
6860 if (!m_sid_map.get_dst_for_src (sid).null_p ())
6861 return;
6862
6863 svalue *sval = m_model.get_svalue (sid);
6864 if (sval)
6865 {
6866 m_sid_map.put (sid, svalue_id::from_int (m_next_sid_int++));
6867 /* Potentially walk regions e.g. for ptrs. */
6868 sval->walk_for_canonicalization (this);
6869 }
6870}
6871
6872/* Dump a multiline representation of this to PP. */
6873
6874void
6875canonicalization::dump_to_pp (pretty_printer *pp) const
6876{
6877 pp_string (pp, "region_id map:");
6878 pp_newline (pp);
6879 m_rid_map.dump_to_pp (pp);
6880 pp_newline (pp);
6881
6882 pp_string (pp, "svalue_id map:");
6883 pp_newline (pp);
6884 m_sid_map.dump_to_pp (pp);
6885 pp_newline (pp);
6886}
6887
6888/* Dump a multiline representation of this to FILE. */
6889
6890void
6891canonicalization::dump (FILE *fp) const
6892{
6893 pretty_printer pp;
6894 pp_format_decoder (&pp) = default_tree_printer;
6895 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6896 pp.buffer->stream = fp;
6897 dump_to_pp (&pp);
6898 pp_flush (&pp);
6899}
6900
6901/* Dump a multiline representation of this to stderr. */
6902
6903DEBUG_FUNCTION void
6904canonicalization::dump () const
6905{
6906 dump (stderr);
6907}
6908
6909/* Update HSTATE with a hash of SID. */
6910
6911void
6912inchash::add (svalue_id sid, inchash::hash &hstate)
6913{
6914 hstate.add_int (sid.as_int ());
6915}
6916
6917/* Update HSTATE with a hash of RID. */
6918
6919void
6920inchash::add (region_id rid, inchash::hash &hstate)
6921{
6922 hstate.add_int (rid.as_int ());
6923}
6924
6925/* Dump RMODEL fully to stderr (i.e. without summarization). */
6926
6927DEBUG_FUNCTION void
6928debug (const region_model &rmodel)
6929{
6930 rmodel.dump (false);
6931}
6932
6933#if CHECKING_P
6934
6935namespace selftest {
6936
6937/* Implementation detail of the ASSERT_CONDITION_* macros. */
6938
6939void
6940assert_condition (const location &loc,
6941 region_model &model,
6942 tree lhs, tree_code op, tree rhs,
6943 tristate expected)
6944{
6945 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
6946 ASSERT_EQ_AT (loc, actual, expected);
6947}
6948
6949/* Implementation detail of ASSERT_DUMP_EQ. */
6950
6951static void
6952assert_dump_eq (const location &loc,
6953 const region_model &model,
6954 bool summarize,
6955 const char *expected)
6956{
6957 auto_fix_quotes sentinel;
6958 pretty_printer pp;
6959 pp_format_decoder (&pp) = default_tree_printer;
6960 model.dump_to_pp (&pp, summarize);
6961 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6962}
6963
6964/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
6965
6966#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
6967 SELFTEST_BEGIN_STMT \
6968 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
6969 SELFTEST_END_STMT
6970
6971/* Smoketest for region_model::dump_to_pp. */
6972
6973static void
6974test_dump ()
6975{
6976 region_model model;
6977 model.get_root_region ()->ensure_stack_region (&model);
6978 model.get_root_region ()->ensure_globals_region (&model);
6979 model.get_root_region ()->ensure_heap_region (&model);
6980
6981 ASSERT_DUMP_EQ (model, false,
6982 "r0: {kind: `root', parent: null, sval: null}\n"
6983 "|-stack: r1: {kind: `stack', parent: r0, sval: sv0}\n"
6984 "| |: sval: sv0: {poisoned: uninit}\n"
6985 "|-globals: r2: {kind: `globals', parent: r0, sval: null, map: {}}\n"
6986 "`-heap: r3: {kind: `heap', parent: r0, sval: sv1}\n"
6987 " |: sval: sv1: {poisoned: uninit}\n"
6988 "svalues:\n"
6989 " sv0: {poisoned: uninit}\n"
6990 " sv1: {poisoned: uninit}\n"
6991 "constraint manager:\n"
6992 " equiv classes:\n"
6993 " constraints:\n");
6994 ASSERT_DUMP_EQ (model, true, "");
6995}
6996
6997/* Verify that calling region_model::get_rvalue repeatedly on the same
6998 tree constant retrieves the same svalue_id. */
6999
7000static void
7001test_unique_constants ()
7002{
7003 tree int_0 = build_int_cst (integer_type_node, 0);
7004 tree int_42 = build_int_cst (integer_type_node, 42);
7005
7006 test_region_model_context ctxt;
7007 region_model model;
7008 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
7009 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
7010 model.get_rvalue (int_42, &ctxt));
7011 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
7012 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
7013}
7014
 7015 /* Check that operator== and hashing work as expected for the
7016 various svalue subclasses. */
7017
7018static void
7019test_svalue_equality ()
7020{
7021 tree int_42 = build_int_cst (integer_type_node, 42);
7022 tree int_0 = build_int_cst (integer_type_node, 0);
7023
 7024   /* Create pairs of instances of the various subclasses of svalue,
7025 testing for hash and equality between (this, this) and
7026 (this, other of same subclass). */
7027 svalue *ptr_to_r0
7028 = new region_svalue (ptr_type_node, region_id::from_int (0));
7029 svalue *ptr_to_r1
7030 = new region_svalue (ptr_type_node, region_id::from_int (1));
7031
7032 ASSERT_EQ (ptr_to_r0->hash (), ptr_to_r0->hash ());
7033 ASSERT_EQ (*ptr_to_r0, *ptr_to_r0);
7034
7035 ASSERT_NE (ptr_to_r0->hash (), ptr_to_r1->hash ());
7036 ASSERT_NE (*ptr_to_r0, *ptr_to_r1);
7037
7038 svalue *cst_int_42 = new constant_svalue (int_42);
7039 svalue *cst_int_0 = new constant_svalue (int_0);
7040
7041 ASSERT_EQ (cst_int_42->hash (), cst_int_42->hash ());
7042 ASSERT_EQ (*cst_int_42, *cst_int_42);
7043
7044 ASSERT_NE (cst_int_42->hash (), cst_int_0->hash ());
7045 ASSERT_NE (*cst_int_42, *cst_int_0);
7046
7047 svalue *uninit = new poisoned_svalue (POISON_KIND_UNINIT, NULL_TREE);
7048 svalue *freed = new poisoned_svalue (POISON_KIND_FREED, NULL_TREE);
7049
7050 ASSERT_EQ (uninit->hash (), uninit->hash ());
7051 ASSERT_EQ (*uninit, *uninit);
7052
7053 ASSERT_NE (uninit->hash (), freed->hash ());
7054 ASSERT_NE (*uninit, *freed);
7055
7056 svalue *unknown_0 = new unknown_svalue (ptr_type_node);
7057 svalue *unknown_1 = new unknown_svalue (ptr_type_node);
7058 ASSERT_EQ (unknown_0->hash (), unknown_0->hash ());
7059 ASSERT_EQ (*unknown_0, *unknown_0);
7060 ASSERT_EQ (*unknown_1, *unknown_1);
7061
7062 /* Comparisons between different kinds of svalue. */
7063 ASSERT_NE (*ptr_to_r0, *cst_int_42);
7064 ASSERT_NE (*ptr_to_r0, *uninit);
7065 ASSERT_NE (*ptr_to_r0, *unknown_0);
7066 ASSERT_NE (*cst_int_42, *ptr_to_r0);
7067 ASSERT_NE (*cst_int_42, *uninit);
7068 ASSERT_NE (*cst_int_42, *unknown_0);
7069 ASSERT_NE (*uninit, *ptr_to_r0);
7070 ASSERT_NE (*uninit, *cst_int_42);
7071 ASSERT_NE (*uninit, *unknown_0);
7072 ASSERT_NE (*unknown_0, *ptr_to_r0);
7073 ASSERT_NE (*unknown_0, *cst_int_42);
7074 ASSERT_NE (*unknown_0, *uninit);
7075
7076 delete ptr_to_r0;
7077 delete ptr_to_r1;
7078 delete cst_int_42;
7079 delete cst_int_0;
7080 delete uninit;
7081 delete freed;
7082 delete unknown_0;
7083 delete unknown_1;
7084}
7085
 7086 /* Check that operator== and hashing work as expected for the
7087 various region subclasses. */
7088
7089static void
7090test_region_equality ()
7091{
7092 region *r0
7093 = new primitive_region (region_id::from_int (3), integer_type_node);
7094 region *r1
7095 = new primitive_region (region_id::from_int (4), integer_type_node);
7096
7097 ASSERT_EQ (*r0, *r0);
7098 ASSERT_EQ (r0->hash (), r0->hash ());
7099 ASSERT_NE (*r0, *r1);
7100 ASSERT_NE (r0->hash (), r1->hash ());
7101
7102 delete r0;
7103 delete r1;
7104
7105 // TODO: test coverage for the map within a map_region
7106}
7107
7108/* A subclass of purge_criteria for selftests: purge all svalue_id instances. */
7109
7110class purge_all_svalue_ids : public purge_criteria
7111{
7112public:
7113 bool should_purge_p (svalue_id) const FINAL OVERRIDE
7114 {
7115 return true;
7116 }
7117};
7118
7119/* A subclass of purge_criteria: purge a specific svalue_id. */
7120
7121class purge_one_svalue_id : public purge_criteria
7122{
7123public:
7124 purge_one_svalue_id (svalue_id victim) : m_victim (victim) {}
7125
7126 purge_one_svalue_id (region_model model, tree expr)
7127 : m_victim (model.get_rvalue (expr, NULL)) {}
7128
7129 bool should_purge_p (svalue_id sid) const FINAL OVERRIDE
7130 {
7131 return sid == m_victim;
7132 }
7133
7134private:
7135 svalue_id m_victim;
7136};
7137
7138/* Check that constraint_manager::purge works for individual svalue_ids. */
7139
7140static void
7141test_purging_by_criteria ()
7142{
7143 tree int_42 = build_int_cst (integer_type_node, 42);
7144 tree int_0 = build_int_cst (integer_type_node, 0);
7145
7146 tree x = build_global_decl ("x", integer_type_node);
7147 tree y = build_global_decl ("y", integer_type_node);
7148
7149 {
7150 region_model model0;
7151 region_model model1;
7152
7153 ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, y);
7154 ASSERT_NE (model0, model1);
7155
7156 purge_stats stats_for_px;
7157 purge_one_svalue_id px (model1, x);
7158 model1.get_constraints ()->purge (px, &stats_for_px);
7159 ASSERT_EQ (stats_for_px.m_num_equiv_classes, 0);
7160
7161 purge_stats stats_for_py;
7162 purge_one_svalue_id py (model1.get_rvalue (y, NULL));
7163 model1.get_constraints ()->purge (py, &stats_for_py);
7164 ASSERT_EQ (stats_for_py.m_num_equiv_classes, 1);
7165
7166 ASSERT_EQ (*model0.get_constraints (), *model1.get_constraints ());
7167 }
7168
7169 {
7170 region_model model0;
7171 region_model model1;
7172
7173 ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, int_42);
7174 ASSERT_NE (model0, model1);
7175 ASSERT_CONDITION_TRUE (model1, x, EQ_EXPR, int_42);
7176
7177 purge_stats stats;
7178 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);
7179
7180 ASSERT_CONDITION_UNKNOWN (model1, x, EQ_EXPR, int_42);
7181 }
7182
7183 {
7184 region_model model0;
7185 region_model model1;
7186
7187 ADD_SAT_CONSTRAINT (model1, x, GE_EXPR, int_0);
7188 ADD_SAT_CONSTRAINT (model1, x, LE_EXPR, int_42);
7189 ASSERT_NE (model0, model1);
7190
7191 ASSERT_CONDITION_TRUE (model1, x, GE_EXPR, int_0);
7192 ASSERT_CONDITION_TRUE (model1, x, LE_EXPR, int_42);
7193
7194 purge_stats stats;
7195 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);
7196
7197 ASSERT_CONDITION_UNKNOWN (model1, x, GE_EXPR, int_0);
7198 ASSERT_CONDITION_UNKNOWN (model1, x, LE_EXPR, int_42);
7199 }
7200
7201 {
7202 region_model model0;
7203 region_model model1;
7204
7205 ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42);
7206 ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0);
7207 ASSERT_NE (model0, model1);
7208 ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42);
7209 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);
7210
7211 purge_stats stats;
7212 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);
7213 ASSERT_NE (model0, model1);
7214
7215 ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42);
7216 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);
7217 }
7218
7219 {
7220 region_model model0;
7221 region_model model1;
7222
7223 ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42);
7224 ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0);
7225 ASSERT_NE (model0, model1);
7226 ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42);
7227 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);
7228
7229 purge_stats stats;
7230 model1.get_constraints ()->purge (purge_all_svalue_ids (), &stats);
7231 ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42);
7232 ASSERT_CONDITION_UNKNOWN (model1, y, NE_EXPR, int_0);
7233 }
7234
7235}
7236
7237/* Test that region_model::purge_unused_svalues works as expected. */
7238
7239static void
7240test_purge_unused_svalues ()
7241{
7242 tree int_42 = build_int_cst (integer_type_node, 42);
7243 tree int_0 = build_int_cst (integer_type_node, 0);
7244 tree x = build_global_decl ("x", integer_type_node);
7245 tree y = build_global_decl ("y", integer_type_node);
7246
7247 test_region_model_context ctxt;
7248 region_model model;
7249 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
7250 &ctxt);
7251 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
7252 &ctxt);
7253 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
7254 &ctxt);
7255 model.add_constraint (x, NE_EXPR, int_42, &ctxt);
7256
7257 model.set_value (model.get_lvalue (x, &ctxt),
7258 model.get_rvalue (int_42, &ctxt),
7259 &ctxt);
7260 model.add_constraint (y, GT_EXPR, int_0, &ctxt);
7261
7262 /* The redundant unknown values should have been purged. */
7263 purge_stats purged;
7264 model.purge_unused_svalues (&purged, NULL);
7265 ASSERT_EQ (purged.m_num_svalues, 3);
7266
7267 /* and the redundant constraint on an old, unknown value for x should
7268 have been purged. */
7269 ASSERT_EQ (purged.m_num_equiv_classes, 1);
7270 ASSERT_EQ (purged.m_num_constraints, 1);
7271 ASSERT_EQ (model.get_constraints ()->m_constraints.length (), 2);
7272
7273 /* ...but we should still have x == 42. */
7274 ASSERT_EQ (model.eval_condition (x, EQ_EXPR, int_42, &ctxt),
7275 tristate::TS_TRUE);
7276
7277 /* ...and we should still have the constraint on y. */
7278 ASSERT_EQ (model.eval_condition (y, GT_EXPR, int_0, &ctxt),
7279 tristate::TS_TRUE);
7280
7281 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
7282}
7283
7284/* Verify that simple assignments work as expected. */
7285
7286static void
7287test_assignment ()
7288{
7289 tree int_0 = build_int_cst (integer_type_node, 0);
7290 tree x = build_global_decl ("x", integer_type_node);
7291 tree y = build_global_decl ("y", integer_type_node);
7292
7293 /* "x == 0", then use of y, then "y = 0;". */
7294 region_model model;
7295 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
7296 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
7297 model.set_value (model.get_lvalue (y, NULL),
7298 model.get_rvalue (int_0, NULL),
7299 NULL);
7300 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
7301 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
7302
7303 ASSERT_DUMP_EQ (model, true, "y: 0, {x}: unknown, x == y");
7304}
7305
7306/* Verify the details of pushing and popping stack frames. */
7307
7308static void
7309test_stack_frames ()
7310{
7311 tree int_42 = build_int_cst (integer_type_node, 42);
7312 tree int_10 = build_int_cst (integer_type_node, 10);
7313 tree int_5 = build_int_cst (integer_type_node, 5);
7314 tree int_0 = build_int_cst (integer_type_node, 0);
7315
7316 auto_vec <tree> param_types;
7317 tree parent_fndecl = make_fndecl (integer_type_node,
7318 "parent_fn",
7319 param_types);
7320 allocate_struct_function (parent_fndecl, true);
7321
7322 tree child_fndecl = make_fndecl (integer_type_node,
7323 "child_fn",
7324 param_types);
7325 allocate_struct_function (child_fndecl, true);
7326
7327 /* "a" and "b" in the parent frame. */
7328 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7329 get_identifier ("a"),
7330 integer_type_node);
7331 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7332 get_identifier ("b"),
7333 integer_type_node);
7334 /* "x" and "y" in a child frame. */
7335 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7336 get_identifier ("x"),
7337 integer_type_node);
7338 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7339 get_identifier ("y"),
7340 integer_type_node);
7341
7342 /* "p" global. */
7343 tree p = build_global_decl ("p", ptr_type_node);
7344
7345 /* "q" global. */
7346 tree q = build_global_decl ("q", ptr_type_node);
7347
7348 test_region_model_context ctxt;
7349 region_model model;
7350
7351 /* Push stack frame for "parent_fn". */
7352 region_id parent_frame_rid
7353 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl), NULL, &ctxt);
7354 ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid);
7355 region_id a_in_parent_rid = model.get_lvalue (a, &ctxt);
7356 model.set_value (a_in_parent_rid, model.get_rvalue (int_42, &ctxt), &ctxt);
7357 model.set_to_new_unknown_value (model.get_lvalue (b, &ctxt),
7358 integer_type_node, &ctxt);
7359 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
7360 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7361 tristate (tristate::TS_TRUE));
7362
7363 /* Push stack frame for "child_fn". */
7364 region_id child_frame_rid
7365 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
7366 ASSERT_EQ (model.get_current_frame_id (), child_frame_rid);
7367 region_id x_in_child_rid = model.get_lvalue (x, &ctxt);
7368 model.set_value (x_in_child_rid, model.get_rvalue (int_0, &ctxt), &ctxt);
7369 model.set_to_new_unknown_value (model.get_lvalue (y, &ctxt),
7370 integer_type_node, &ctxt);
7371 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
7372 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
7373 tristate (tristate::TS_TRUE));
7374
7375 /* Point a global pointer at a local in the child frame: p = &x. */
7376 region_id p_in_globals_rid = model.get_lvalue (p, &ctxt);
7377 model.set_value (p_in_globals_rid,
7378 model.get_or_create_ptr_svalue (ptr_type_node,
7379 x_in_child_rid),
7380 &ctxt);
7381
7382 /* Point another global pointer at p: q = &p. */
7383 region_id q_in_globals_rid = model.get_lvalue (q, &ctxt);
7384 model.set_value (q_in_globals_rid,
7385 model.get_or_create_ptr_svalue (ptr_type_node,
7386 p_in_globals_rid),
7387 &ctxt);
7388
7389 /* Test get_descendents. */
7390 region_id_set descendents (&model);
7391 model.get_descendents (child_frame_rid, &descendents, region_id::null ());
7392 ASSERT_TRUE (descendents.region_p (child_frame_rid));
7393 ASSERT_TRUE (descendents.region_p (x_in_child_rid));
7394 ASSERT_FALSE (descendents.region_p (a_in_parent_rid));
7395 ASSERT_EQ (descendents.num_regions (), 3);
7396#if 0
7397 auto_vec<region_id> test_vec;
7398 for (region_id_set::iterator_t iter = descendents.begin ();
7399 iter != descendents.end ();
7400 ++iter)
7401 test_vec.safe_push (*iter);
7402 gcc_unreachable (); // TODO
7403 //ASSERT_EQ ();
7404#endif
7405
7406 ASSERT_DUMP_EQ (model, true,
7407 "x: 0, {y}: unknown, p: &x, q: &p, b < 10, y != 5");
7408
7409 /* Pop the "child_fn" frame from the stack. */
7410 purge_stats purged;
7411 model.pop_frame (true, &purged, &ctxt);
7412
7413 /* We should have purged the unknown values for x and y. */
7414 ASSERT_EQ (purged.m_num_svalues, 2);
7415
7416 /* We should have purged the frame region and the regions for x and y. */
7417 ASSERT_EQ (purged.m_num_regions, 3);
7418
7419 /* We should have purged the constraint on y. */
7420 ASSERT_EQ (purged.m_num_equiv_classes, 1);
7421 ASSERT_EQ (purged.m_num_constraints, 1);
7422
7423 /* Verify that p (which was pointing at the local "x" in the popped
7424 frame) has been poisoned. */
7425 svalue *new_p_sval = model.get_svalue (model.get_rvalue (p, &ctxt));
7426 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
7427 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
7428 POISON_KIND_POPPED_STACK);
7429
7430 /* Verify that q still points to p, in spite of the region
7431 renumbering. */
7432 svalue *new_q_sval = model.get_svalue (model.get_rvalue (q, &ctxt));
7433 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
7434 ASSERT_EQ (new_q_sval->dyn_cast_region_svalue ()->get_pointee (),
7435 model.get_lvalue (p, &ctxt));
7436
7437 /* Verify that top of stack has been updated. */
7438 ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid);
7439
7440 /* Verify locals in parent frame. */
7441 /* Verify "a" still has its value. */
7442 svalue *new_a_sval = model.get_svalue (model.get_rvalue (a, &ctxt));
7443 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
7444 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
7445 int_42);
7446 /* Verify "b" still has its constraint. */
7447 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7448 tristate (tristate::TS_TRUE));
7449}
7450
 7451 /* Verify that get_representative_path_var works as expected: that
7452 we can map from region ids to parms and back within a recursive call
7453 stack. */
7454
7455static void
7456test_get_representative_path_var ()
7457{
7458 auto_vec <tree> param_types;
7459 tree fndecl = make_fndecl (integer_type_node,
7460 "factorial",
7461 param_types);
7462 allocate_struct_function (fndecl, true);
7463
7464 /* Parm "n". */
7465 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7466 get_identifier ("n"),
7467 integer_type_node);
7468
7469 region_model model;
7470
 7471   /* Push 5 stack frames for "factorial", each with a param.  */
7472 auto_vec<region_id> parm_rids;
7473 auto_vec<svalue_id> parm_sids;
7474 for (int depth = 0; depth < 5; depth++)
7475 {
7476 region_id frame_rid
7477 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, NULL);
7478 region_id rid_n = model.get_lvalue (path_var (n, depth), NULL);
7479 parm_rids.safe_push (rid_n);
7480
7481 ASSERT_EQ (model.get_region (rid_n)->get_parent (), frame_rid);
7482
7483 svalue_id sid_n
7484 = model.set_to_new_unknown_value (rid_n, integer_type_node, NULL);
7485 parm_sids.safe_push (sid_n);
7486 }
7487
7488 /* Verify that we can recognize that the regions are the parms,
7489 at every depth. */
7490 for (int depth = 0; depth < 5; depth++)
7491 {
7492 ASSERT_EQ (model.get_representative_path_var (parm_rids[depth]),
7493 path_var (n, depth));
7494 /* ...and that we can lookup lvalues for locals for all frames,
7495 not just the top. */
7496 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
7497 parm_rids[depth]);
7498 /* ...and that we can locate the svalues. */
7499 auto_vec<path_var> pvs;
7500 model.get_path_vars_for_svalue (parm_sids[depth], &pvs);
7501 ASSERT_EQ (pvs.length (), 1);
7502 ASSERT_EQ (pvs[0], path_var (n, depth));
7503 }
7504}
7505
7506/* Verify that the core regions within a region_model are in a consistent
7507 order after canonicalization. */
7508
7509static void
7510test_canonicalization_1 ()
7511{
7512 region_model model0;
7513 model0.get_root_region ()->ensure_stack_region (&model0);
7514 model0.get_root_region ()->ensure_globals_region (&model0);
7515
7516 region_model model1;
7517 model1.get_root_region ()->ensure_globals_region (&model1);
7518 model1.get_root_region ()->ensure_stack_region (&model1);
7519
7520 model0.canonicalize (NULL);
7521 model1.canonicalize (NULL);
7522 ASSERT_EQ (model0, model1);
7523}
7524
7525/* Verify that region models for
7526 x = 42; y = 113;
7527 and
7528 y = 113; x = 42;
7529 are equal after canonicalization. */
7530
7531static void
7532test_canonicalization_2 ()
7533{
7534 tree int_42 = build_int_cst (integer_type_node, 42);
7535 tree int_113 = build_int_cst (integer_type_node, 113);
7536 tree x = build_global_decl ("x", integer_type_node);
7537 tree y = build_global_decl ("y", integer_type_node);
7538
7539 region_model model0;
7540 model0.set_value (model0.get_lvalue (x, NULL),
7541 model0.get_rvalue (int_42, NULL),
7542 NULL);
7543 model0.set_value (model0.get_lvalue (y, NULL),
7544 model0.get_rvalue (int_113, NULL),
7545 NULL);
7546
7547 region_model model1;
7548 model1.set_value (model1.get_lvalue (y, NULL),
7549 model1.get_rvalue (int_113, NULL),
7550 NULL);
7551 model1.set_value (model1.get_lvalue (x, NULL),
7552 model1.get_rvalue (int_42, NULL),
7553 NULL);
7554
7555 model0.canonicalize (NULL);
7556 model1.canonicalize (NULL);
7557 ASSERT_EQ (model0, model1);
7558}
7559
7560/* Verify that constraints for
7561 x > 3 && y > 42
7562 and
7563 y > 42 && x > 3
7564 are equal after canonicalization. */
7565
7566static void
7567test_canonicalization_3 ()
7568{
7569 tree int_3 = build_int_cst (integer_type_node, 3);
7570 tree int_42 = build_int_cst (integer_type_node, 42);
7571 tree x = build_global_decl ("x", integer_type_node);
7572 tree y = build_global_decl ("y", integer_type_node);
7573
7574 region_model model0;
7575 model0.add_constraint (x, GT_EXPR, int_3, NULL);
7576 model0.add_constraint (y, GT_EXPR, int_42, NULL);
7577
7578 region_model model1;
7579 model1.add_constraint (y, GT_EXPR, int_42, NULL);
7580 model1.add_constraint (x, GT_EXPR, int_3, NULL);
7581
7582 model0.canonicalize (NULL);
7583 model1.canonicalize (NULL);
7584 ASSERT_EQ (model0, model1);
7585}
7586
7587/* Assert that if we have two region_model instances
 7588    with values VAL_A and VAL_B for EXPR, then they are
 7589    mergeable.  Write the merged model to *OUT_MERGED_MODEL,
7590 and the merged svalue ptr to *OUT_MERGED_SVALUE.
7591 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
7592 for that region_model. */
7593
7594static void
7595assert_region_models_merge (tree expr, tree val_a, tree val_b,
7596 region_model *out_merged_model,
7597 svalue **out_merged_svalue)
7598{
7599 test_region_model_context ctxt;
7600 region_model model0;
7601 region_model model1;
7602 if (val_a)
7603 model0.set_value (model0.get_lvalue (expr, &ctxt),
7604 model0.get_rvalue (val_a, &ctxt),
7605 &ctxt);
7606 if (val_b)
7607 model1.set_value (model1.get_lvalue (expr, &ctxt),
7608 model1.get_rvalue (val_b, &ctxt),
7609 &ctxt);
7610
7611 /* They should be mergeable. */
7612 ASSERT_TRUE (model0.can_merge_with_p (model1, out_merged_model));
7613
7614 svalue_id merged_svalue_sid = out_merged_model->get_rvalue (expr, &ctxt);
7615 *out_merged_svalue = out_merged_model->get_svalue (merged_svalue_sid);
7616}
7617
7618/* Verify that we can merge region_model instances. */
7619
7620static void
7621test_state_merging ()
7622{
7623 tree int_42 = build_int_cst (integer_type_node, 42);
7624 tree int_113 = build_int_cst (integer_type_node, 113);
7625 tree x = build_global_decl ("x", integer_type_node);
7626 tree y = build_global_decl ("y", integer_type_node);
7627 tree z = build_global_decl ("z", integer_type_node);
7628 tree p = build_global_decl ("p", ptr_type_node);
7629
7630 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7631 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7632
7633 auto_vec <tree> param_types;
7634 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7635 allocate_struct_function (test_fndecl, true);
7636
7637 /* Param "a". */
7638 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7639 get_identifier ("a"),
7640 integer_type_node);
7641 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7642
7643 {
7644 region_model model0;
7645 region_model model1;
7646 region_model merged;
7647 /* Verify empty models can be merged. */
7648 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7649 ASSERT_EQ (model0, merged);
7650 }
7651
7652 /* Verify that we can merge two models that hold contradictory
7653 constraints on the value of a global. */
7654 /* TODO: verify that the merged model doesn't have a value for
7655 the global. */
7656 {
7657 region_model model0;
7658 region_model model1;
7659 region_model merged;
7660 test_region_model_context ctxt;
7661 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7662 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
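/* The merge should succeed, but the result should match neither input,
since each input carries a constraint (x == 42 vs x == 113) that the
other contradicts. */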
7663 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7664 ASSERT_NE (model0, merged);
7665 ASSERT_NE (model1, merged);
7666 }
7667
7668 /* Verify handling of a PARM_DECL. */
7669 {
7670 test_region_model_context ctxt;
7671 region_model model0;
7672 region_model model1;
7673 ASSERT_EQ (model0.get_stack_depth (), 0);
7674 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7675 ASSERT_EQ (model0.get_stack_depth (), 1);
7676 ASSERT_EQ (model0.get_function_at_depth (0),
7677 DECL_STRUCT_FUNCTION (test_fndecl));
7678 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7679
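/* Bind the param "a" to a fresh unknown value in each model. */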
7680 svalue_id sid_a
7681 = model0.set_to_new_unknown_value (model0.get_lvalue (a, &ctxt),
7682 integer_type_node, &ctxt);
7683 model1.set_to_new_unknown_value (model1.get_lvalue (a, &ctxt),
7684 integer_type_node, &ctxt);
7685 ASSERT_EQ (model0, model1);
7686
7687 /* Check that get_value_by_name works for locals. */
7688 ASSERT_EQ (model0.get_value_by_name ("a"), sid_a);
7689
7690 /* They should be mergeable, and the result should be the same. */
7691 region_model merged;
7692 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7693 ASSERT_EQ (model0, merged);
7694 /* In particular, there should be an unknown value for "a". */
7695 svalue *merged_a_sval = merged.get_svalue (merged.get_rvalue (a, &ctxt));
7696 ASSERT_EQ (merged_a_sval->get_kind (), SK_UNKNOWN);
7697 }
7698
7699 /* Verify handling of a global. */
7700 {
7701 test_region_model_context ctxt;
7702 region_model model0;
7703 region_model model1;
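/* Bind the global "x" to a fresh unknown value in each model. */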
7704 svalue_id sid_x
7705 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
7706 integer_type_node, &ctxt);
7707 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt),
7708 integer_type_node, &ctxt);
7709 ASSERT_EQ (model0, model1);
7710
7711 /* Check that get_value_by_name works for globals. */
7712 ASSERT_EQ (model0.get_value_by_name ("x"), sid_x);
7713
7714 /* They should be mergeable, and the result should be the same. */
7715 region_model merged;
7716 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7717 ASSERT_EQ (model0, merged);
7718 /* In particular, there should be an unknown value for "x". */
7719 svalue *merged_x_sval = merged.get_svalue (merged.get_rvalue (x, &ctxt));
7720 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7721 }
7722
7723 /* Use the handling of globals to verify how various combinations of values are merged. */
7724
7725 /* Two equal constant values. */
7726 {
7727 region_model merged;
7728 svalue *merged_x_sval;
7729 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7730
7731 /* In particular, there should be a constant value for "x". */
7732 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7733 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7734 int_42);
7735 }
7736
7737 /* Two non-equal constant values. */
7738 {
7739 region_model merged;
7740 svalue *merged_x_sval;
7741 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7742
7743 /* In particular, there should be an unknown value for "x". */
7744 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7745 }
7746
7747 /* Uninit and constant. */
7748 {
7749 region_model merged;
7750 svalue *merged_x_sval;
7751 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7752
7753 /* In particular, there should be an unknown value for "x". */
7754 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7755 }
7756
7757 /* Constant and uninit. */
7758 {
7759 region_model merged;
7760 svalue *merged_x_sval;
7761 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7762
7763 /* In particular, there should be an unknown value for "x". */
7764 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7765 }
7766
7767 /* Unknown and constant. */
7768 // TODO
7769
7770 /* Pointers: NULL and NULL. */
7771 // TODO
7772
7773 /* Pointers: NULL and non-NULL. */
7774 // TODO
7775
7776 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7777 {
7778 region_model model0;
7779 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
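/* Within the frame, give the param "a" an unknown value, and point the
global "p" at it. */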
7780 model0.set_to_new_unknown_value (model0.get_lvalue (a, NULL),
7781 integer_type_node, NULL);
7782 model0.set_value (model0.get_lvalue (p, NULL),
7783 model0.get_rvalue (addr_of_a, NULL), NULL);
7784
7785 region_model model1 (model0);
7786 ASSERT_EQ (model0, model1);
7787
7788 /* They should be mergeable, and the result should be the same. */
7789 region_model merged;
7790 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7791 ASSERT_EQ (model0, merged);
7792 }
7793
7794 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7795 {
7796 region_model merged;
7797 /* p == &y in both input models. */
7798 svalue *merged_p_sval;
7799 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7800 &merged_p_sval);
7801
7802 /* We should get p == &y in the merged model. */
7803 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7804 region_svalue *merged_p_ptr = merged_p_sval->dyn_cast_region_svalue ();
7805 region_id merged_p_star_rid = merged_p_ptr->get_pointee ();
7806 ASSERT_EQ (merged_p_star_rid, merged.get_lvalue (y, NULL));
7807 }
7808
7809 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7810 {
7811 region_model merged;
7812 /* x == &y vs x == &z in the input models. */
7813 svalue *merged_x_sval;
7814 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7815 &merged_x_sval);
7816
7817 /* We should get x == unknown in the merged model. */
7818 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7819 }
7820
7821 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7822 {
7823 test_region_model_context ctxt;
7824 region_model model0;
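/* Create a fresh heap region (as a "malloc" call would) and make "p"
point to it. */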
7825 region_id new_rid = model0.add_new_malloc_region ();
7826 svalue_id ptr_sid
7827 = model0.get_or_create_ptr_svalue (ptr_type_node, new_rid);
7828 model0.set_value (model0.get_lvalue (p, &ctxt),
7829 ptr_sid, &ctxt);
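/* Canonicalize so that the equality checks below aren't affected by the
internal ordering of IDs. */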
7830 model0.canonicalize (&ctxt);
7831
7832 region_model model1 (model0);
7833
7834 ASSERT_EQ (model0, model1);
7835
7836 region_model merged;
7837 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7838
7839 merged.canonicalize (&ctxt);
7840
7841 /* The merged model ought to be identical (after canonicalization,
7842 at least). */
7843 ASSERT_EQ (model0, merged);
7844 }
7845
7846 /* Two regions sharing the same unknown svalue should continue sharing
7847 an unknown svalue after self-merger. */
7848 {
7849 test_region_model_context ctxt;
7850 region_model model0;
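/* Bind "x" to a fresh unknown value, then bind "y" to the same svalue. */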
7851 svalue_id sid
7852 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
7853 integer_type_node, &ctxt);
7854 model0.set_value (model0.get_lvalue (y, &ctxt), sid, &ctxt);
7855 region_model model1 (model0);
7856
7857 /* They should be mergeable, and the result should be the same. */
7858 region_model merged;
7859 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7860 ASSERT_EQ (model0, merged);
7861
7862 /* In particular, we should have x == y. */
7863 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7864 tristate (tristate::TS_TRUE));
7865 }
7866
7867#if 0
7868 {
7869 region_model model0;
7870 region_model model1;
7871 test_region_model_context ctxt;
7872 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7873 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
region_model merged;
7874 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7875 }
7876
7877 {
7878 region_model model0;
7879 region_model model1;
7880 test_region_model_context ctxt;
7881 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7882 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7883 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
region_model merged;
7884 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7885 }
7886#endif
7887
7888 // TODO: what can't we merge? need at least one such test
7889
7890 /* TODO: various things
7891 - heap regions
7892 - value merging:
7893 - every combination, but in particular
7894 - pairs of regions
7895 */
7896
7897 /* Views: verify that identical models that set a value through a view of "x" (i.e. "x"'s region reinterpreted as a pointer type) can be merged. */
7898 {
7899 test_region_model_context ctxt;
7900 region_model model0;
7901
7902 region_id x_rid = model0.get_lvalue (x, &ctxt);
7903 region_id x_as_ptr = model0.get_or_create_view (x_rid, ptr_type_node);
7904 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7905
7906 region_model model1 (model0);
7907 ASSERT_EQ (model1, model0);
7908
7909 /* They should be mergeable, and the result should be the same. */
7910 region_model merged;
7911 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7912 }
7913}
7914
7915/* Verify that constraints are correctly merged when merging region_model
7916 instances. */
7917
7918static void
7919test_constraint_merging ()
7920{
7921 tree int_0 = build_int_cst (integer_type_node, 0);
7922 tree int_5 = build_int_cst (integer_type_node, 5);
7923 tree x = build_global_decl ("x", integer_type_node);
7924 tree y = build_global_decl ("y", integer_type_node);
7925 tree z = build_global_decl ("z", integer_type_node);
7926 tree n = build_global_decl ("n", integer_type_node);
7927
7928 test_region_model_context ctxt;
7929
7930 /* model0: 0 <= (x == y) < n. */
7931 region_model model0;
7932 model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
7933 integer_type_node, &ctxt);
7934 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
7935 model0.add_constraint (x, GE_EXPR, int_0, NULL);
7936 model0.add_constraint (x, LT_EXPR, n, NULL);
7937
7938 /* model1: z != 5 && (0 <= x < n). */
7939 region_model model1;
7940 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt),
7941 integer_type_node, &ctxt);
7942 model1.add_constraint (z, NE_EXPR, int_5, NULL);
7943 model1.add_constraint (x, GE_EXPR, int_0, NULL);
7944 model1.add_constraint (x, LT_EXPR, n, NULL);
7945
7946 /* They should be mergeable; the merged constraints should
7947 be: (0 <= x < n). */
7948 region_model merged;
7949 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7950
7951 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
7952 tristate (tristate::TS_TRUE));
7953 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
7954 tristate (tristate::TS_TRUE));
7955
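/* Constraints that were present in only one of the input models should
have been dropped by the merge. */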
7956 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
7957 tristate (tristate::TS_UNKNOWN));
7958 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
7959 tristate (tristate::TS_UNKNOWN));
7960}
7961
7962/* Run all of the selftests within this file. */
7963
7964void
7965analyzer_region_model_cc_tests ()
7966{
7967 test_dump ();
7968 test_unique_constants ();
7969 test_svalue_equality ();
7970 test_region_equality ();
7971 test_purging_by_criteria ();
7972 test_purge_unused_svalues ();
7973 test_assignment ();
7974 test_stack_frames ();
7975 test_get_representative_path_var ();
7976 test_canonicalization_1 ();
7977 test_canonicalization_2 ();
7978 test_canonicalization_3 ();
7979 test_state_merging ();
7980 test_constraint_merging ();
7981}
7982
7983} // namespace selftest
7984
7985#endif /* CHECKING_P */
7986
7987#endif /* #if ENABLE_ANALYZER */