1 /* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tree.h"
25 #include "function.h"
26 #include "basic-block.h"
27 #include "gimple.h"
28 #include "gimple-iterator.h"
29 #include "diagnostic-core.h"
30 #include "graphviz.h"
31 #include "options.h"
32 #include "cgraph.h"
33 #include "tree-dfa.h"
34 #include "stringpool.h"
35 #include "convert.h"
36 #include "target.h"
37 #include "fold-const.h"
38 #include "tree-pretty-print.h"
39 #include "diagnostic-color.h"
40 #include "diagnostic-metadata.h"
41 #include "tristate.h"
42 #include "bitmap.h"
43 #include "selftest.h"
44 #include "function.h"
45 #include "analyzer/analyzer.h"
46 #include "analyzer/analyzer-logging.h"
47 #include "ordered-hash-map.h"
48 #include "options.h"
49 #include "cgraph.h"
50 #include "cfg.h"
51 #include "digraph.h"
52 #include "analyzer/supergraph.h"
53 #include "sbitmap.h"
54 #include "analyzer/region-model.h"
55 #include "analyzer/constraint-manager.h"
56 #include "diagnostic-event-id.h"
57 #include "analyzer/sm.h"
58 #include "diagnostic-event-id.h"
59 #include "analyzer/sm.h"
60 #include "analyzer/pending-diagnostic.h"
61 #include "analyzer/analyzer-selftests.h"
62
63 #if ENABLE_ANALYZER
64
65 namespace ana {
66
67 /* Dump T to PP in language-independent form, for debugging/logging/dumping
68 purposes. */
69
70 static void
71 dump_tree (pretty_printer *pp, tree t)
72 {
73 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
74 }
75
76 /* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
77 calls within other pp_printf calls.
78
79 default_tree_printer handles 'T' and some other codes by calling
80 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
81 dump_generic_node calls pp_printf in various places, leading to
82 garbled output.
83
84 Ideally pp_printf could be made to be reentrant, but in the meantime
85 this function provides a workaround. */
86
87 static void
88 print_quoted_type (pretty_printer *pp, tree t)
89 {
90 pp_begin_quote (pp, pp_show_color (pp));
91 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
92 pp_end_quote (pp, pp_show_color (pp));
93 }
94
95 /* Dump this path_var to PP (which must support %E for trees).
96
97 Express the stack depth using an "@DEPTH" suffix, so e.g. given
98 void foo (int j);
99 void bar (int i)
100 {
101 foo (i);
102 }
103 then:
104 - the "i" in "bar" would be "(i @ 0)"
105 - the "j" in "foo" would be "(j @ 1)". */
106
107 void
108 path_var::dump (pretty_printer *pp) const
109 {
110 if (m_tree == NULL_TREE)
111 pp_string (pp, "NULL");
112   else if (CONSTANT_CLASS_P (m_tree))
113 pp_printf (pp, "%qE", m_tree);
114 else
115 pp_printf (pp, "(%qE @ %i)", m_tree, m_stack_depth);
116 }
117
118 /* For use in printing a comma-separated list. */
119
120 static void
121 dump_separator (pretty_printer *pp, bool *is_first)
122 {
123 if (!*is_first)
124 pp_string (pp, ", ");
125 *is_first = false;
126 }
127
128 /* Concrete subclass of constraint_manager that wires it up to a region_model
129 (whilst allowing the constraint_manager and region_model to be somewhat
130    at arm's length).
131 TODO: revisit this; maybe put the region_model * into the constraint_manager
132 base class. */
133
134 class impl_constraint_manager : public constraint_manager
135 {
136 public:
137 impl_constraint_manager (region_model *model)
138 : constraint_manager (),
139 m_model (model)
140 {}
141
142 impl_constraint_manager (const impl_constraint_manager &other,
143 region_model *model)
144 : constraint_manager (other),
145 m_model (model)
146 {}
147
148 constraint_manager *clone (region_model *model) const
149 {
150 return new impl_constraint_manager (*this, model);
151 }
152
153 tree maybe_get_constant (svalue_id sid) const FINAL OVERRIDE
154 {
155 svalue *svalue = m_model->get_svalue (sid);
156 return svalue->maybe_get_constant ();
157 }
158
159 svalue_id get_sid_for_constant (tree cst) const FINAL OVERRIDE
160 {
161 gcc_assert (CONSTANT_CLASS_P (cst));
162 return m_model->get_rvalue (cst, NULL);
163 }
164
165 int get_num_svalues () const FINAL OVERRIDE
166 {
167 return m_model->get_num_svalues ();
168 }
169
170 private:
171 region_model *m_model;
172 };
173
174 /* class svalue_id. */
175
176 /* Print this svalue_id to PP. */
177
178 void
179 svalue_id::print (pretty_printer *pp) const
180 {
181 if (null_p ())
182 pp_printf (pp, "null");
183 else
184 pp_printf (pp, "sv%i", m_idx);
185 }
186
187 /* Print this svalue_id in .dot format to PP. */
188
189 void
190 svalue_id::dump_node_name_to_pp (pretty_printer *pp) const
191 {
192 gcc_assert (!null_p ());
193 pp_printf (pp, "svalue_%i", m_idx);
194 }
195
196 /* Assert that this object is valid (w.r.t. MODEL). */
197
198 void
199 svalue_id::validate (const region_model &model) const
200 {
201 gcc_assert (null_p () || m_idx < (int)model.get_num_svalues ());
202 }
203
204 /* class region_id. */
205
206 /* Print this region_id to PP. */
207
208 void
209 region_id::print (pretty_printer *pp) const
210 {
211 if (null_p ())
212 pp_printf (pp, "null");
213 else
214 pp_printf (pp, "r%i", m_idx);
215 }
216
217 /* Print this region_id in .dot format to PP. */
218
219 void
220 region_id::dump_node_name_to_pp (pretty_printer *pp) const
221 {
222 gcc_assert (!null_p ());
223 pp_printf (pp, "region_%i", m_idx);
224 }
225
226 /* Assert that this object is valid (w.r.t. MODEL). */
227
228 void
229 region_id::validate (const region_model &model) const
230 {
231 gcc_assert (null_p () || m_idx < (int)model.get_num_regions ());
232 }
233
234 /* class id_set. */
235
236 /* id_set<region_id>'s ctor. */
237
238 template<>
239 id_set<region_id>::id_set (const region_model *model)
240 : m_bitmap (model->get_num_regions ())
241 {
242 bitmap_clear (m_bitmap);
243 }
244
245 /* class svalue and its various subclasses. */
246
247 /* class svalue. */
248
249 /* svalue's equality operator. Most of the work is done by
250 a "compare_fields" implementation on each subclass. */
251
252 bool
253 svalue::operator== (const svalue &other) const
254 {
255 enum svalue_kind this_kind = get_kind ();
256 enum svalue_kind other_kind = other.get_kind ();
257 if (this_kind != other_kind)
258 return false;
259
260 if (m_type != other.m_type)
261 return false;
262
263 switch (this_kind)
264 {
265 default:
266 gcc_unreachable ();
267 case SK_REGION:
268 {
269 const region_svalue &this_sub
270 = (const region_svalue &)*this;
271 const region_svalue &other_sub
272 = (const region_svalue &)other;
273 return this_sub.compare_fields (other_sub);
274 }
275 break;
276 case SK_CONSTANT:
277 {
278 const constant_svalue &this_sub
279 = (const constant_svalue &)*this;
280 const constant_svalue &other_sub
281 = (const constant_svalue &)other;
282 return this_sub.compare_fields (other_sub);
283 }
284 break;
285 case SK_UNKNOWN:
286 {
287 const unknown_svalue &this_sub
288 = (const unknown_svalue &)*this;
289 const unknown_svalue &other_sub
290 = (const unknown_svalue &)other;
291 return this_sub.compare_fields (other_sub);
292 }
293 break;
294 case SK_POISONED:
295 {
296 const poisoned_svalue &this_sub
297 = (const poisoned_svalue &)*this;
298 const poisoned_svalue &other_sub
299 = (const poisoned_svalue &)other;
300 return this_sub.compare_fields (other_sub);
301 }
302 break;
303 case SK_SETJMP:
304 {
305 const setjmp_svalue &this_sub
306 = (const setjmp_svalue &)*this;
307 const setjmp_svalue &other_sub
308 = (const setjmp_svalue &)other;
309 return this_sub.compare_fields (other_sub);
310 }
311 break;
312 }
313 }
314
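/* A rough worked example of the dispatch above, for illustration only:
   two constant_svalue instances wrapping the same tree constant (and with
   equal m_type) compare equal via constant_svalue::compare_fields, whereas
   svalues of different kinds, e.g. a constant_svalue versus an
   unknown_svalue, always compare unequal, since the kind check fails before
   any compare_fields call is reached.  */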
315 /* Generate a hash value for this svalue. Most of the work is done by the
316 add_to_hash vfunc. */
317
318 hashval_t
319 svalue::hash () const
320 {
321 inchash::hash hstate;
322 if (m_type)
323 hstate.add_int (TYPE_UID (m_type));
324 add_to_hash (hstate);
325 return hstate.end ();
326 }
327
328 /* Print this svalue and its ID to PP. */
329
330 void
331 svalue::print (const region_model &model,
332 svalue_id this_sid,
333 pretty_printer *pp) const
334 {
335 this_sid.print (pp);
336 pp_string (pp, ": {");
337
338 if (m_type)
339 {
340 gcc_assert (TYPE_P (m_type));
341 pp_string (pp, "type: ");
342 print_quoted_type (pp, m_type);
343 pp_string (pp, ", ");
344 }
345
346 /* vfunc. */
347 print_details (model, this_sid, pp);
348
349 pp_string (pp, "}");
350 }
351
352 /* Dump this svalue in the form of a .dot record to PP. */
353
354 void
355 svalue::dump_dot_to_pp (const region_model &model,
356 svalue_id this_sid,
357 pretty_printer *pp) const
358 {
359 this_sid.dump_node_name_to_pp (pp);
360 pp_printf (pp, " [label=\"");
361 pp_write_text_to_stream (pp);
362 this_sid.print (pp);
363 pp_string (pp, ": {");
364 print (model, this_sid, pp);
365 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
366 pp_string (pp, "}\"];");
367 pp_newline (pp);
368 }
369
370 /* Base implementation of svalue::remap_region_ids vfunc. */
371
372 void
373 svalue::remap_region_ids (const region_id_map &)
374 {
375 /* Empty. */
376 }
377
378 /* Base implementation of svalue::walk_for_canonicalization vfunc. */
379
380 void
381 svalue::walk_for_canonicalization (canonicalization *) const
382 {
383 /* Empty. */
384 }
385
386 /* Base implementation of svalue::get_child_sid vfunc. */
387
388 svalue_id
389 svalue::get_child_sid (region *parent ATTRIBUTE_UNUSED,
390 region *child,
391 region_model &model,
392 region_model_context *ctxt ATTRIBUTE_UNUSED)
393 {
394 svalue *new_child_value = clone ();
395 if (child->get_type ())
396 new_child_value->m_type = child->get_type ();
397 svalue_id new_child_sid = model.add_svalue (new_child_value);
398 return new_child_sid;
399 }
400
401 /* If this svalue is a constant_svalue, return the underlying tree constant.
402 Otherwise return NULL_TREE. */
403
404 tree
405 svalue::maybe_get_constant () const
406 {
407 if (const constant_svalue *cst_sval = dyn_cast_constant_svalue ())
408 return cst_sval->get_constant ();
409 else
410 return NULL_TREE;
411 }
412
413 /* class region_svalue : public svalue. */
414
415 /* Compare the fields of this region_svalue with OTHER, returning true
416 if they are equal.
417 For use by svalue::operator==. */
418
419 bool
420 region_svalue::compare_fields (const region_svalue &other) const
421 {
422 return m_rid == other.m_rid;
423 }
424
425 /* Implementation of svalue::add_to_hash vfunc for region_svalue. */
426
427 void
428 region_svalue::add_to_hash (inchash::hash &hstate) const
429 {
430 inchash::add (m_rid, hstate);
431 }
432
433 /* Implementation of svalue::print_details vfunc for region_svalue. */
434
435 void
436 region_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
437 svalue_id this_sid ATTRIBUTE_UNUSED,
438 pretty_printer *pp) const
439 {
440 if (m_rid.null_p ())
441 pp_string (pp, "NULL");
442 else
443 {
444 pp_string (pp, "&");
445 m_rid.print (pp);
446 }
447 }
448
449 /* Implementation of svalue::dump_dot_to_pp for region_svalue. */
450
451 void
452 region_svalue::dump_dot_to_pp (const region_model &model,
453 svalue_id this_sid,
454 pretty_printer *pp) const
455 {
456 svalue::dump_dot_to_pp (model, this_sid, pp);
457
458 /* If non-NULL, add an edge to the pointed-to region. */
459 if (!m_rid.null_p ())
460 {
461 this_sid.dump_node_name_to_pp (pp);
462 pp_string (pp, " -> ");
463 m_rid.dump_node_name_to_pp (pp);
464 pp_string (pp, ";");
465 pp_newline (pp);
466 }
467 }
468
469 /* Implementation of svalue::remap_region_ids vfunc for region_svalue. */
470
471 void
472 region_svalue::remap_region_ids (const region_id_map &map)
473 {
474 map.update (&m_rid);
475 }
476
477 /* Merge REGION_SVAL_A and REGION_SVAL_B using MERGER, writing the result
478 into *MERGED_SID. */
479
480 void
481 region_svalue::merge_values (const region_svalue &region_sval_a,
482 const region_svalue &region_sval_b,
483 svalue_id *merged_sid,
484 tree type,
485 model_merger *merger)
486 {
487 region_id a_rid = region_sval_a.get_pointee ();
488 region_id b_rid = region_sval_b.get_pointee ();
489
490 /* Both are non-NULL. */
491 gcc_assert (!a_rid.null_p () && !b_rid.null_p ());
492
493 /* Have these ptr-values already been merged? */
494
495 region_id a_rid_in_m
496 = merger->m_map_regions_from_a_to_m.get_dst_for_src (a_rid);
497 region_id b_rid_in_m
498 = merger->m_map_regions_from_b_to_m.get_dst_for_src (b_rid);
499
500 /* "null_p" here means "we haven't seen this ptr-value before".
501 If we've seen one but not the other, or we have different
502 regions, then the merged ptr has to be "unknown". */
503 if (a_rid_in_m != b_rid_in_m)
504 {
505 svalue *merged_sval = new unknown_svalue (type);
506 *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
507 return;
508 }
509
510 /* Have we seen this yet? If so, reuse the value. */
511 if (!a_rid_in_m.null_p ())
512 {
513 *merged_sid
514 = merger->m_merged_model->get_or_create_ptr_svalue (type, a_rid_in_m);
515 return;
516 }
517
518 /* Otherwise we have A/B regions that haven't been referenced yet. */
519
520   /* Are the regions the "same", when seen from the tree point-of-view?
521 If so, create a merged pointer to it. */
522 path_var pv_a = merger->m_model_a->get_representative_path_var (a_rid);
523 path_var pv_b = merger->m_model_b->get_representative_path_var (b_rid);
524 if (pv_a.m_tree
525 && pv_a == pv_b)
526 {
527 region_id merged_pointee_rid
528 = merger->m_merged_model->get_lvalue (pv_a, NULL);
529 *merged_sid
530 = merger->m_merged_model->get_or_create_ptr_svalue (type,
531 merged_pointee_rid);
532 merger->record_regions (a_rid, b_rid, merged_pointee_rid);
533 return;
534 }
535
536 /* Handle an A/B pair of ptrs that both point at heap regions.
537 If they both have a heap region in the merger model, merge them. */
538 region *region_a = merger->m_model_a->get_region (a_rid);
539 region *region_b = merger->m_model_b->get_region (b_rid);
540 region_id a_parent_rid = region_a->get_parent ();
541 region_id b_parent_rid = region_b->get_parent ();
542 region *parent_region_a = merger->m_model_a->get_region (a_parent_rid);
543 region *parent_region_b = merger->m_model_b->get_region (b_parent_rid);
544 if (parent_region_a
545 && parent_region_b
546 && parent_region_a->get_kind () == RK_HEAP
547 && parent_region_b->get_kind () == RK_HEAP)
548 {
549 /* We have an A/B pair of ptrs that both point at heap regions. */
550       /* Presumably we want to see if each A/B heap region already
551 	 has a merged region and, if so, whether it is the same one;
552 	 that check was done above.  */
553
554 region_id merged_pointee_rid
555 = merger->m_merged_model->add_new_malloc_region ();
556 *merged_sid
557 = merger->m_merged_model->get_or_create_ptr_svalue
558 (type, merged_pointee_rid);
559 merger->record_regions (a_rid, b_rid, merged_pointee_rid);
560 return;
561 }
562
563 /* Two different non-NULL pointers? Merge to unknown. */
564 svalue *merged_sval = new unknown_svalue (type);
565 *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
566 return;
567 }
568
569 /* Implementation of svalue::walk_for_canonicalization vfunc for
570 region_svalue. */
571
572 void
573 region_svalue::walk_for_canonicalization (canonicalization *c) const
574 {
575 c->walk_rid (m_rid);
576 }
577
578 /* Evaluate the condition LHS OP RHS.
579 Subroutine of region_model::eval_condition for when we have a pair of
580 pointers. */
581
582 tristate
583 region_svalue::eval_condition (region_svalue *lhs,
584 enum tree_code op,
585 region_svalue *rhs)
586 {
587 /* See if they point to the same region. */
588 /* TODO: what about child regions where the child is the first child
589 (or descendent)? */
590 region_id lhs_rid = lhs->get_pointee ();
591 region_id rhs_rid = rhs->get_pointee ();
592 switch (op)
593 {
594 default:
595 gcc_unreachable ();
596
597 case EQ_EXPR:
598 if (lhs_rid == rhs_rid)
599 return tristate::TS_TRUE;
600 else
601 return tristate::TS_FALSE;
602 break;
603
604 case NE_EXPR:
605 if (lhs_rid != rhs_rid)
606 return tristate::TS_TRUE;
607 else
608 return tristate::TS_FALSE;
609 break;
610
611 case GE_EXPR:
612 case LE_EXPR:
613 if (lhs_rid == rhs_rid)
614 return tristate::TS_TRUE;
615 break;
616
617 case GT_EXPR:
618 case LT_EXPR:
619 if (lhs_rid == rhs_rid)
620 return tristate::TS_FALSE;
621 break;
622 }
623
624 return tristate::TS_UNKNOWN;
625 }
626
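/* A small sketch of the outcomes above (illustrative only; "p" and "q"
   stand for two region_svalue instances):

     if p and q point at the same region:
       eval_condition (&p, EQ_EXPR, &q) => TS_TRUE
       eval_condition (&p, LE_EXPR, &q) => TS_TRUE
       eval_condition (&p, LT_EXPR, &q) => TS_FALSE

     if they point at different regions:
       eval_condition (&p, EQ_EXPR, &q) => TS_FALSE
       eval_condition (&p, NE_EXPR, &q) => TS_TRUE
       eval_condition (&p, LT_EXPR, &q) => TS_UNKNOWN (no ordering known).  */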
627 /* class constant_svalue : public svalue. */
628
629 /* Compare the fields of this constant_svalue with OTHER, returning true
630 if they are equal.
631 For use by svalue::operator==. */
632
633 bool
634 constant_svalue::compare_fields (const constant_svalue &other) const
635 {
636 return m_cst_expr == other.m_cst_expr;
637 }
638
639 /* Implementation of svalue::add_to_hash vfunc for constant_svalue. */
640
641 void
642 constant_svalue::add_to_hash (inchash::hash &hstate) const
643 {
644 inchash::add_expr (m_cst_expr, hstate);
645 }
646
647 /* Merge the CST_SVAL_A and CST_SVAL_B using MERGER, writing the id of
648 the resulting svalue into *MERGED_SID. */
649
650 void
651 constant_svalue::merge_values (const constant_svalue &cst_sval_a,
652 const constant_svalue &cst_sval_b,
653 svalue_id *merged_sid,
654 model_merger *merger)
655 {
656 tree cst_a = cst_sval_a.get_constant ();
657 tree cst_b = cst_sval_b.get_constant ();
658 svalue *merged_sval;
659 if (cst_a == cst_b)
660 {
661 /* If they are the same constant, merge as that constant value. */
662 merged_sval = new constant_svalue (cst_a);
663 }
664 else
665 {
666 /* Otherwise, we have two different constant values.
667 Merge as an unknown value.
668 TODO: impose constraints on the value?
669 (maybe just based on A, to avoid infinite chains) */
670 merged_sval = new unknown_svalue (TREE_TYPE (cst_a));
671 }
672 *merged_sid = merger->m_merged_model->add_svalue (merged_sval);
673 }
674
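/* A brief illustration of the merge rule above (a sketch, not a definitive
   recipe): merging two svalues that wrap the same INTEGER_CST, say 42 and
   42, produces a constant_svalue for 42 in the merged model, whereas
   merging 42 with 43 produces an unknown_svalue of the constants' type.  */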
675 /* Evaluate the condition LHS OP RHS.
676 Subroutine of region_model::eval_condition for when we have a pair of
677 constants. */
678
679 tristate
680 constant_svalue::eval_condition (constant_svalue *lhs,
681 enum tree_code op,
682 constant_svalue *rhs)
683 {
684 tree lhs_const = lhs->get_constant ();
685 tree rhs_const = rhs->get_constant ();
686
687 gcc_assert (CONSTANT_CLASS_P (lhs_const));
688 gcc_assert (CONSTANT_CLASS_P (rhs_const));
689
690 /* Check for comparable types. */
691 if (types_compatible_p (TREE_TYPE (lhs_const), TREE_TYPE (rhs_const)))
692 {
693 tree comparison
694 = fold_binary (op, boolean_type_node, lhs_const, rhs_const);
695 if (comparison == boolean_true_node)
696 return tristate (tristate::TS_TRUE);
697 if (comparison == boolean_false_node)
698 return tristate (tristate::TS_FALSE);
699 }
700 return tristate::TS_UNKNOWN;
701 }
702
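/* A small worked example of the folding above (illustrative only):
   comparing the INTEGER_CSTs 3 and 4 with LT_EXPR, where the types are
   compatible, fold_binary yields boolean_true_node, so the result is
   TS_TRUE; if the constant types are incompatible, or folding does not
   produce a boolean constant, the result falls back to TS_UNKNOWN.  */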
703 /* Implementation of svalue::print_details vfunc for constant_svalue. */
704
705 void
706 constant_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
707 svalue_id this_sid ATTRIBUTE_UNUSED,
708 pretty_printer *pp) const
709 {
710 pp_printf (pp, "%qE", m_cst_expr);
711 }
712
713 /* Implementation of svalue::get_child_sid vfunc for constant_svalue. */
714
715 svalue_id
716 constant_svalue::get_child_sid (region *parent ATTRIBUTE_UNUSED,
717 region *child,
718 region_model &model,
719 region_model_context *ctxt ATTRIBUTE_UNUSED)
720 {
721 /* TODO: handle the all-zeroes case by returning an all-zeroes of the
722 child type. */
723
724 /* Otherwise, we don't have a good way to get a child value out of a
725 constant.
726
727 Handle this case by using an unknown value. */
728 svalue *unknown_sval = new unknown_svalue (child->get_type ());
729 return model.add_svalue (unknown_sval);
730 }
731
732 /* class unknown_svalue : public svalue. */
733
734 /* Compare the fields of this unknown_svalue with OTHER, returning true
735 if they are equal.
736 For use by svalue::operator==. */
737
738 bool
739 unknown_svalue::compare_fields (const unknown_svalue &) const
740 {
741 /* I *think* we want to return true here, in that when comparing
742 two region models, we want two peer unknown_svalue instances
743 to be the "same". */
744 return true;
745 }
746
747 /* Implementation of svalue::add_to_hash vfunc for unknown_svalue. */
748
749 void
750 unknown_svalue::add_to_hash (inchash::hash &) const
751 {
752 /* Empty. */
753 }
754
755 /* Implementation of svalue::print_details vfunc for unknown_svalue. */
756
757 void
758 unknown_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
759 svalue_id this_sid ATTRIBUTE_UNUSED,
760 pretty_printer *pp) const
761 {
762 pp_string (pp, "unknown");
763 }
764
765 /* Get a string for KIND for use in debug dumps. */
766
767 const char *
768 poison_kind_to_str (enum poison_kind kind)
769 {
770 switch (kind)
771 {
772 default:
773 gcc_unreachable ();
774 case POISON_KIND_UNINIT:
775 return "uninit";
776 case POISON_KIND_FREED:
777 return "freed";
778 case POISON_KIND_POPPED_STACK:
779 return "popped stack";
780 }
781 }
782
783 /* class poisoned_svalue : public svalue. */
784
785 /* Compare the fields of this poisoned_svalue with OTHER, returning true
786 if they are equal.
787 For use by svalue::operator==. */
788
789 bool
790 poisoned_svalue::compare_fields (const poisoned_svalue &other) const
791 {
792 return m_kind == other.m_kind;
793 }
794
795 /* Implementation of svalue::add_to_hash vfunc for poisoned_svalue. */
796
797 void
798 poisoned_svalue::add_to_hash (inchash::hash &hstate) const
799 {
800 hstate.add_int (m_kind);
801 }
802
803 /* Implementation of svalue::print_details vfunc for poisoned_svalue. */
804
805 void
806 poisoned_svalue::print_details (const region_model &model ATTRIBUTE_UNUSED,
807 svalue_id this_sid ATTRIBUTE_UNUSED,
808 pretty_printer *pp) const
809 {
810 pp_printf (pp, "poisoned: %s", poison_kind_to_str (m_kind));
811 }
812
813 /* class setjmp_svalue's implementation is in engine.cc, so that it can use
814 the declaration of exploded_node. */
815
816 /* class region and its various subclasses. */
817
818 /* Get a string for KIND for use in debug dumps. */
819
820 const char *
821 region_kind_to_str (enum region_kind kind)
822 {
823 switch (kind)
824 {
825 default:
826 gcc_unreachable ();
827 case RK_PRIMITIVE:
828 return "primitive";
829 case RK_STRUCT:
830 return "struct";
831 case RK_UNION:
832 return "union";
833 case RK_ARRAY:
834 return "array";
835 case RK_FRAME:
836 return "frame";
837 case RK_GLOBALS:
838 return "globals";
839 case RK_CODE:
840 return "code";
841 case RK_FUNCTION:
842 return "function";
843 case RK_STACK:
844 return "stack";
845 case RK_HEAP:
846 return "heap";
847 case RK_ROOT:
848 return "root";
849 case RK_SYMBOLIC:
850 return "symbolic";
851 }
852 }
853
854 /* class region. */
855
856 /* Equality operator for region.
857 After comparing base class fields and kind, the rest of the
858 comparison is handled off to a "compare_fields" member function
859 specific to the appropriate subclass. */
860
861 bool
862 region::operator== (const region &other) const
863 {
864 if (m_parent_rid != other.m_parent_rid)
865 return false;
866 if (m_sval_id != other.m_sval_id)
867 return false;
868 if (m_type != other.m_type)
869 return false;
870
871 enum region_kind this_kind = get_kind ();
872 enum region_kind other_kind = other.get_kind ();
873 if (this_kind != other_kind)
874 return false;
875
876 /* Compare views. */
877 if (m_view_rids.length () != other.m_view_rids.length ())
878 return false;
879 int i;
880 region_id *rid;
881 FOR_EACH_VEC_ELT (m_view_rids, i, rid)
882 if (! (*rid == other.m_view_rids[i]))
883 return false;
884
885 switch (this_kind)
886 {
887 default:
888 gcc_unreachable ();
889 case RK_PRIMITIVE:
890 {
891 #if 1
892 return true;
893 #else
894 const primitive_region &this_sub
895 = (const primitive_region &)*this;
896 const primitive_region &other_sub
897 = (const primitive_region &)other;
898 return this_sub.compare_fields (other_sub);
899 #endif
900 }
901 case RK_STRUCT:
902 {
903 const struct_region &this_sub
904 = (const struct_region &)*this;
905 const struct_region &other_sub
906 = (const struct_region &)other;
907 return this_sub.compare_fields (other_sub);
908 }
909 case RK_UNION:
910 {
911 const union_region &this_sub
912 = (const union_region &)*this;
913 const union_region &other_sub
914 = (const union_region &)other;
915 return this_sub.compare_fields (other_sub);
916 }
917 case RK_ARRAY:
918 {
919 const array_region &this_sub
920 = (const array_region &)*this;
921 const array_region &other_sub
922 = (const array_region &)other;
923 return this_sub.compare_fields (other_sub);
924 }
925 case RK_FRAME:
926 {
927 const frame_region &this_sub
928 = (const frame_region &)*this;
929 const frame_region &other_sub
930 = (const frame_region &)other;
931 return this_sub.compare_fields (other_sub);
932 }
933 case RK_GLOBALS:
934 {
935 const globals_region &this_sub
936 = (const globals_region &)*this;
937 const globals_region &other_sub
938 = (const globals_region &)other;
939 return this_sub.compare_fields (other_sub);
940 }
941 case RK_CODE:
942 {
943 const code_region &this_sub
944 = (const code_region &)*this;
945 const code_region &other_sub
946 = (const code_region &)other;
947 return this_sub.compare_fields (other_sub);
948 }
949 case RK_FUNCTION:
950 {
951 const function_region &this_sub
952 = (const function_region &)*this;
953 const function_region &other_sub
954 = (const function_region &)other;
955 return this_sub.compare_fields (other_sub);
956 }
957 case RK_STACK:
958 {
959 const stack_region &this_sub
960 = (const stack_region &)*this;
961 const stack_region &other_sub
962 = (const stack_region &)other;
963 return this_sub.compare_fields (other_sub);
964 }
965 case RK_ROOT:
966 {
967 const root_region &this_sub
968 = (const root_region &)*this;
969 const root_region &other_sub
970 = (const root_region &)other;
971 return this_sub.compare_fields (other_sub);
972 }
973 case RK_SYMBOLIC:
974 {
975 const symbolic_region &this_sub
976 = (const symbolic_region &)*this;
977 const symbolic_region &other_sub
978 = (const symbolic_region &)other;
979 return this_sub.compare_fields (other_sub);
980 }
981 case RK_HEAP:
982 {
983 const heap_region &this_sub
984 = (const heap_region &)*this;
985 const heap_region &other_sub
986 = (const heap_region &)other;
987 return this_sub.compare_fields (other_sub);
988 }
989 }
990 }
991
992 /* Get the parent region of this region. */
993
994 region *
995 region::get_parent_region (const region_model &model) const
996 {
997 return model.get_region (m_parent_rid);
998 }
999
1000 /* Set this region's value to RHS_SID (or potentially a variant of it,
1001 for some kinds of casts). */
1002
1003 void
1004 region::set_value (region_model &model, region_id this_rid, svalue_id rhs_sid,
1005 region_model_context *ctxt)
1006 {
1007 /* Handle some kinds of casting. */
1008 if (m_type)
1009 {
1010 svalue *sval = model.get_svalue (rhs_sid);
1011 if (sval->get_type ())
1012 rhs_sid = model.maybe_cast (m_type, rhs_sid, ctxt);
1013
1014 sval = model.get_svalue (rhs_sid);
1015 if (sval->get_type ())
1016 gcc_assert (m_type == sval->get_type ());
1017 }
1018
1019 m_sval_id = rhs_sid;
1020
1021 /* Update views.
1022 If this is a view, it becomes its parent's active view.
1023      If there was already an active view, invalidate its value; otherwise
1024 if the parent itself had a value, invalidate it.
1025 If it's not a view, then deactivate any view that is active on this
1026 region. */
1027 {
1028 if (m_is_view)
1029 become_active_view (model, this_rid);
1030 else
1031 {
1032 deactivate_any_active_view (model);
1033 gcc_assert (m_active_view_rid.null_p ());
1034 }
1035 }
1036 }
1037
1038 /* Make this region (with id THIS_RID) the "active" view of its parent.
1039 Any other active view has its value set to "unknown" and descendent values
1040 cleared.
1041 If there wasn't an active view, then set the parent's value to unknown, and
1042 clear its descendent values (apart from this view). */
1043
1044 void
1045 region::become_active_view (region_model &model, region_id this_rid)
1046 {
1047 gcc_assert (m_is_view);
1048
1049 region *parent_reg = model.get_region (m_parent_rid);
1050 gcc_assert (parent_reg);
1051
1052 region_id old_active_view_rid = parent_reg->m_active_view_rid;
1053
1054 if (old_active_view_rid == this_rid)
1055 {
1056 /* Already the active view: do nothing. */
1057 return;
1058 }
1059
1060 /* We have a change of active view. */
1061 parent_reg->m_active_view_rid = this_rid;
1062
1063 if (old_active_view_rid.null_p ())
1064 {
1065 /* No previous active view, but the parent and its other children
1066 might have values.
1067 If so, invalidate those values - but not that of the new view. */
1068 region_id_set below_region (&model);
1069 model.get_descendents (m_parent_rid, &below_region, this_rid);
1070 for (unsigned i = 0; i < model.get_num_regions (); i++)
1071 {
1072 region_id rid (region_id::from_int (i));
1073 if (below_region.region_p (rid))
1074 {
1075 region *other_reg = model.get_region (rid);
1076 other_reg->m_sval_id = svalue_id::null ();
1077 }
1078 }
1079 region *parent = model.get_region (m_parent_rid);
1080 parent->m_sval_id
1081 = model.add_svalue (new unknown_svalue (parent->get_type ()));
1082 }
1083 else
1084 {
1085 /* If there was an active view, invalidate it. */
1086 region *old_active_view = model.get_region (old_active_view_rid);
1087 old_active_view->deactivate_view (model, old_active_view_rid);
1088 }
1089 }
1090
1091 /* If this region has an active view, deactivate it,
1092 clearing m_active_view_rid. */
1093
1094 void
1095 region::deactivate_any_active_view (region_model &model)
1096 {
1097 if (m_active_view_rid.null_p ())
1098 return;
1099 region *view = model.get_region (m_active_view_rid);
1100 view->deactivate_view (model, m_active_view_rid);
1101 m_active_view_rid = region_id::null ();
1102 }
1103
1104 /* Clear any values for regions below THIS_VIEW_RID.
1105 Set the view's value to unknown. */
1106
1107 void
1108 region::deactivate_view (region_model &model, region_id this_view_rid)
1109 {
1110 gcc_assert (is_view_p ());
1111
1112   /* Purge values from THIS_VIEW_RID and all its
1113 descendents. Potentially we could use a poison value
1114 for this, but let's use unknown for now. */
1115 region_id_set below_view (&model);
1116 model.get_descendents (this_view_rid, &below_view, region_id::null ());
1117
1118 for (unsigned i = 0; i < model.get_num_regions (); i++)
1119 {
1120 region_id rid (region_id::from_int (i));
1121 if (below_view.region_p (rid))
1122 {
1123 region *other_reg = model.get_region (rid);
1124 other_reg->m_sval_id = svalue_id::null ();
1125 }
1126 }
1127
1128 m_sval_id = model.add_svalue (new unknown_svalue (get_type ()));
1129 }
1130
1131 /* Get a value for this region, either its value if it has one,
1132    or, failing that, "inherit" a value from the first ancestor with a
1133 non-null value.
1134
1135 For example, when getting the value for a local variable within
1136 a stack frame that doesn't have one, the frame doesn't have a value
1137 either, but the stack as a whole will have an "uninitialized" poison
1138 value, so inherit that. */
1139
1140 svalue_id
1141 region::get_value (region_model &model, bool non_null,
1142 region_model_context *ctxt)
1143 {
1144 /* If this region has a value, use it. */
1145 if (!m_sval_id.null_p ())
1146 return m_sval_id;
1147
1148   /* Otherwise, "inherit" a value from the first ancestor with a
1149 non-null value. */
1150
1151 region *parent = model.get_region (m_parent_rid);
1152 if (parent)
1153 {
1154 svalue_id inherited_sid
1155 = parent->get_inherited_child_sid (this, model, ctxt);
1156 if (!inherited_sid.null_p ())
1157 return inherited_sid;
1158 }
1159
1160 /* If a non-null value has been requested, then generate
1161 a new unknown value. Store it, so that repeated reads from this
1162 region will yield the same unknown value. */
1163 if (non_null)
1164 {
1165 svalue_id unknown_sid = model.add_svalue (new unknown_svalue (m_type));
1166 m_sval_id = unknown_sid;
1167 return unknown_sid;
1168 }
1169
1170 return svalue_id::null ();
1171 }
1172
1173 /* Get a value for CHILD, inheriting from this region.
1174
1175 Recurse, so this region will inherit a value if it doesn't already
1176 have one. */
1177
1178 svalue_id
1179 region::get_inherited_child_sid (region *child,
1180 region_model &model,
1181 region_model_context *ctxt)
1182 {
1183 if (m_sval_id.null_p ())
1184 {
1185 /* Recurse. */
1186 if (!m_parent_rid.null_p ())
1187 {
1188 region *parent = model.get_region (m_parent_rid);
1189 m_sval_id = parent->get_inherited_child_sid (this, model, ctxt);
1190 }
1191 }
1192
1193 if (!m_sval_id.null_p ())
1194 {
1195 /* Clone the parent's value, so that attempts to update it
1196 	 (e.g. giving a specific value to an inherited "uninitialized"
1197 value) touch the child, and not the parent. */
1198 svalue *this_value = model.get_svalue (m_sval_id);
1199 svalue_id new_child_sid
1200 = this_value->get_child_sid (this, child, model, ctxt);
1201 if (ctxt)
1202 ctxt->on_inherited_svalue (m_sval_id, new_child_sid);
1203 child->m_sval_id = new_child_sid;
1204 return new_child_sid;
1205 }
1206
1207 return svalue_id::null ();
1208 }
1209
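/* An illustrative walk-through of the inheritance above (a sketch, not
   normative): for an as-yet-unwritten local within a stack frame, neither
   the local's region nor the frame has a value, but the stack region
   carries an "uninit" poisoned_svalue; the recursion pulls that value down
   through the frame, and svalue::get_child_sid clones it with the local's
   type, so the local ends up bound to its own copy rather than sharing the
   ancestor's svalue.  */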
1210 /* Generate a hash value for this region. The work is done by the
1211 add_to_hash vfunc. */
1212
1213 hashval_t
1214 region::hash () const
1215 {
1216 inchash::hash hstate;
1217 add_to_hash (hstate);
1218 return hstate.end ();
1219 }
1220
1221 /* Print a one-liner representation of this region to PP, assuming
1222 that this region is within MODEL and its id is THIS_RID. */
1223
1224 void
1225 region::print (const region_model &model,
1226 region_id this_rid,
1227 pretty_printer *pp) const
1228 {
1229 this_rid.print (pp);
1230 pp_string (pp, ": {");
1231
1232 /* vfunc. */
1233 print_fields (model, this_rid, pp);
1234
1235 pp_string (pp, "}");
1236 }
1237
1238 /* Base class implementation of region::dump_dot_to_pp vfunc. */
1239
1240 void
1241 region::dump_dot_to_pp (const region_model &model,
1242 region_id this_rid,
1243 pretty_printer *pp) const
1244 {
1245 this_rid.dump_node_name_to_pp (pp);
1246 pp_printf (pp, " [shape=none,margin=0,style=filled,fillcolor=%s,label=\"",
1247 "lightgrey");
1248 pp_write_text_to_stream (pp);
1249 print (model, this_rid, pp);
1250 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
1251 pp_string (pp, "\"];");
1252 pp_newline (pp);
1253
1254 /* Add edge to svalue. */
1255 if (!m_sval_id.null_p ())
1256 {
1257 this_rid.dump_node_name_to_pp (pp);
1258 pp_string (pp, " -> ");
1259 m_sval_id.dump_node_name_to_pp (pp);
1260 pp_string (pp, ";");
1261 pp_newline (pp);
1262 }
1263
1264 /* Add edge to parent. */
1265 if (!m_parent_rid.null_p ())
1266 {
1267 this_rid.dump_node_name_to_pp (pp);
1268 pp_string (pp, " -> ");
1269 m_parent_rid.dump_node_name_to_pp (pp);
1270 pp_string (pp, ";");
1271 pp_newline (pp);
1272 }
1273 }
1274
1275 /* Dump a tree-like ASCII-art representation of this region to PP. */
1276
1277 void
1278 region::dump_to_pp (const region_model &model,
1279 region_id this_rid,
1280 pretty_printer *pp,
1281 const char *prefix,
1282 bool is_last_child) const
1283 {
1284 print (model, this_rid, pp);
1285 pp_newline (pp);
1286
1287 const char *new_prefix;
1288 if (!m_parent_rid.null_p ())
1289 new_prefix = ACONCAT ((prefix, is_last_child ? " " : "| ", NULL));
1290 else
1291 new_prefix = prefix;
1292
1293 const char *begin_color = colorize_start (pp_show_color (pp), "note");
1294 const char *end_color = colorize_stop (pp_show_color (pp));
1295 char *field_prefix
1296 = ACONCAT ((begin_color, new_prefix, "|:", end_color, NULL));
1297
1298 if (!m_sval_id.null_p ())
1299 {
1300 pp_printf (pp, "%s sval: ", field_prefix);
1301 model.get_svalue (m_sval_id)->print (model, m_sval_id, pp);
1302 pp_newline (pp);
1303 }
1304 if (m_type)
1305 {
1306 pp_printf (pp, "%s type: ", field_prefix);
1307 print_quoted_type (pp, m_type);
1308 pp_newline (pp);
1309 }
1310
1311 /* Find the children. */
1312
1313 auto_vec<region_id> child_rids;
1314 unsigned i;
1315 for (unsigned i = 0; i < model.get_num_regions (); ++i)
1316 {
1317 region_id rid = region_id::from_int (i);
1318 region *child = model.get_region (rid);
1319 if (child->m_parent_rid == this_rid)
1320 child_rids.safe_push (rid);
1321 }
1322
1323 /* Print the children, using dump_child_label to label them. */
1324
1325 region_id *child_rid;
1326 FOR_EACH_VEC_ELT (child_rids, i, child_rid)
1327 {
1328 is_last_child = (i == child_rids.length () - 1);
1329 if (!this_rid.null_p ())
1330 {
1331 const char *tail = is_last_child ? "`-" : "|-";
1332 pp_printf (pp, "%r%s%s%R", "note", new_prefix, tail);
1333 }
1334 dump_child_label (model, this_rid, *child_rid, pp);
1335 model.get_region (*child_rid)->dump_to_pp (model, *child_rid, pp,
1336 new_prefix,
1337 is_last_child);
1338 }
1339 }
1340
1341 /* Base implementation of region::dump_child_label vfunc. */
1342
1343 void
1344 region::dump_child_label (const region_model &model,
1345 region_id this_rid ATTRIBUTE_UNUSED,
1346 region_id child_rid,
1347 pretty_printer *pp) const
1348 {
1349 region *child = model.get_region (child_rid);
1350 if (child->m_is_view)
1351 {
1352 gcc_assert (TYPE_P (child->get_type ()));
1353 if (m_active_view_rid == child_rid)
1354 pp_string (pp, "active ");
1355 else
1356 pp_string (pp, "inactive ");
1357 pp_string (pp, "view as ");
1358 print_quoted_type (pp, child->get_type ());
1359 pp_string (pp, ": ");
1360 }
1361 }
1362
1363 /* Assert that this object is valid. */
1364
1365 void
1366 region::validate (const region_model *model) const
1367 {
1368 m_parent_rid.validate (*model);
1369 m_sval_id.validate (*model);
1370 unsigned i;
1371 region_id *view_rid;
1372 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1373 {
1374 gcc_assert (!view_rid->null_p ());
1375 view_rid->validate (*model);
1376 }
1377 m_active_view_rid.validate (*model);
1378 }
1379
1380 /* Apply MAP to the svalue_ids within this region.  This updates the value
1381 for the region (if any). */
1382
1383 void
1384 region::remap_svalue_ids (const svalue_id_map &map)
1385 {
1386 map.update (&m_sval_id);
1387 }
1388
1389 /* Base implementation of region::remap_region_ids vfunc; subclasses should
1390 chain up to this, updating any region_id data. */
1391
1392 void
1393 region::remap_region_ids (const region_id_map &map)
1394 {
1395 map.update (&m_parent_rid);
1396 unsigned i;
1397 region_id *view_rid;
1398 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1399 map.update (view_rid);
1400 map.update (&m_active_view_rid);
1401 }
1402
1403 /* Add a new region with id VIEW_RID as a view of this region. */
1404
1405 void
1406 region::add_view (region_id view_rid, region_model *model)
1407 {
1408 gcc_assert (!view_rid.null_p ());
1409 region *new_view = model->get_region (view_rid);
1410 new_view->m_is_view = true;
1411 gcc_assert (!new_view->m_parent_rid.null_p ());
1412 gcc_assert (new_view->m_sval_id.null_p ());
1413
1414 //gcc_assert (new_view->get_type () != NULL_TREE);
1415 // TODO: this can sometimes be NULL, when viewing through a (void *)
1416
1417 // TODO: the type ought to not be present yet
1418
1419 m_view_rids.safe_push (view_rid);
1420 }
1421
1422 /* Look for a view of type TYPE of this region, returning its id if found,
1423 or null otherwise. */
1424
1425 region_id
1426 region::get_view (tree type, region_model *model) const
1427 {
1428 unsigned i;
1429 region_id *view_rid;
1430 FOR_EACH_VEC_ELT (m_view_rids, i, view_rid)
1431 {
1432 region *view = model->get_region (*view_rid);
1433 gcc_assert (view->m_is_view);
1434 if (view->get_type () == type)
1435 return *view_rid;
1436 }
1437 return region_id::null ();
1438 }
1439
1440 /* region's ctor. */
1441
1442 region::region (region_id parent_rid, svalue_id sval_id, tree type)
1443 : m_parent_rid (parent_rid), m_sval_id (sval_id), m_type (type),
1444 m_view_rids (), m_is_view (false), m_active_view_rid (region_id::null ())
1445 {
1446 gcc_assert (type == NULL_TREE || TYPE_P (type));
1447 }
1448
1449 /* region's copy ctor. */
1450
1451 region::region (const region &other)
1452 : m_parent_rid (other.m_parent_rid), m_sval_id (other.m_sval_id),
1453 m_type (other.m_type), m_view_rids (other.m_view_rids.length ()),
1454 m_is_view (other.m_is_view), m_active_view_rid (other.m_active_view_rid)
1455 {
1456 int i;
1457 region_id *rid;
1458 FOR_EACH_VEC_ELT (other.m_view_rids, i, rid)
1459 m_view_rids.quick_push (*rid);
1460 }
1461
1462 /* Base implementation of region::add_to_hash vfunc; subclasses should
1463 chain up to this. */
1464
1465 void
1466 region::add_to_hash (inchash::hash &hstate) const
1467 {
1468 inchash::add (m_parent_rid, hstate);
1469 inchash::add (m_sval_id, hstate);
1470 hstate.add_ptr (m_type);
1471 // TODO: views
1472 }
1473
1474 /* Base implementation of region::print_fields vfunc. */
1475
1476 void
1477 region::print_fields (const region_model &model ATTRIBUTE_UNUSED,
1478 region_id this_rid ATTRIBUTE_UNUSED,
1479 pretty_printer *pp) const
1480 {
1481 pp_printf (pp, "kind: %qs", region_kind_to_str (get_kind ()));
1482
1483 pp_string (pp, ", parent: ");
1484 m_parent_rid.print (pp);
1485
1486 pp_printf (pp, ", sval: ");
1487 m_sval_id.print (pp);
1488
1489 if (m_type)
1490 {
1491 pp_printf (pp, ", type: ");
1492 print_quoted_type (pp, m_type);
1493 }
1494 }
1495
1496 /* Determine if a pointer to this region must be non-NULL.
1497
1498 Generally, pointers to regions must be non-NULL, but pointers
1499 to symbolic_regions might, in fact, be NULL.
1500
1501 This allows us to simulate functions like malloc and calloc with:
1502 - only one "outcome" from each statement,
1503 - the idea that the pointer is on the heap if non-NULL
1504 - the possibility that the pointer could be NULL
1505 - the idea that successive values returned from malloc are non-equal
1506 - to be able to zero-fill for calloc. */
1507
1508 bool
1509 region::non_null_p (const region_model &model) const
1510 {
1511 /* Look through views to get at the underlying region. */
1512 if (is_view_p ())
1513 return model.get_region (m_parent_rid)->non_null_p (model);
1514
1515 /* Are we within a symbolic_region? If so, it could be NULL. */
1516 if (const symbolic_region *sym_reg = dyn_cast_symbolic_region ())
1517 {
1518 if (sym_reg->m_possibly_null)
1519 return false;
1520 }
1521
1522 return true;
1523 }
1524
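/* An illustrative example of the above (a sketch of the intended modelling,
   not a guarantee of this implementation's details): for C code such as

     void *p = malloc (1);
     if (p)
       ...

   the value of "p" points into a symbolic region flagged as possibly-NULL,
   so non_null_p returns false for it and the analyzer keeps the
   "p == NULL" path feasible, whereas a pointer to an ordinary declared
   object reports true.  */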
1525 /* class primitive_region : public region. */
1526
1527 /* Implementation of region::clone vfunc for primitive_region. */
1528
1529 region *
1530 primitive_region::clone () const
1531 {
1532 return new primitive_region (*this);
1533 }
1534
1535 /* Implementation of region::walk_for_canonicalization vfunc for
1536 primitive_region. */
1537
1538 void
1539 primitive_region::walk_for_canonicalization (canonicalization *) const
1540 {
1541 /* Empty. */
1542 }
1543
1544 /* class map_region : public region. */
1545
1546 /* map_region's copy ctor. */
1547
1548 map_region::map_region (const map_region &other)
1549 : region (other),
1550 m_map (other.m_map)
1551 {
1552 }
1553
1554 /* Compare the fields of this map_region with OTHER, returning true
1555 if they are equal.
1556 For use by region::operator==. */
1557
1558 bool
1559 map_region::compare_fields (const map_region &other) const
1560 {
1561 if (m_map.elements () != other.m_map.elements ())
1562 return false;
1563
1564 for (map_t::iterator iter = m_map.begin ();
1565 iter != m_map.end ();
1566 ++iter)
1567 {
1568 tree key = (*iter).first;
1569 region_id e = (*iter).second;
1570 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key);
1571 if (other_slot == NULL)
1572 return false;
1573 if (e != *other_slot)
1574 return false;
1575 }
1576 return true;
1577 }
1578
1579 /* Implementation of region::print_fields vfunc for map_region. */
1580
1581 void
1582 map_region::print_fields (const region_model &model,
1583 region_id this_rid,
1584 pretty_printer *pp) const
1585 {
1586 region::print_fields (model, this_rid, pp);
1587 pp_string (pp, ", map: {");
1588 for (map_t::iterator iter = m_map.begin ();
1589 iter != m_map.end ();
1590 ++iter)
1591 {
1592 if (iter != m_map.begin ())
1593 pp_string (pp, ", ");
1594 tree expr = (*iter).first;
1595 region_id child_rid = (*iter).second;
1596 pp_printf (pp, "%qE: ", expr);
1597 child_rid.print (pp);
1598 }
1599 pp_string (pp, "}");
1600 }
1601
1602 /* Implementation of region::dump_dot_to_pp vfunc for map_region. */
1603
1604 void
1605 map_region::dump_dot_to_pp (const region_model &model,
1606 region_id this_rid,
1607 pretty_printer *pp) const
1608 {
1609 region::dump_dot_to_pp (model, this_rid, pp);
1610 for (map_t::iterator iter = m_map.begin ();
1611 iter != m_map.end ();
1612 ++iter)
1613 {
1614 // TODO: add nodes/edges to label things
1615
1616 tree expr = (*iter).first;
1617 region_id child_rid = (*iter).second;
1618
1619 pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ());
1620 pp_write_text_to_stream (pp);
1621 pp_printf (pp, "%qE", expr);
1622 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
1623 pp_string (pp, "\"];");
1624 pp_newline (pp);
1625
1626 pp_printf (pp, "rid_label_%i", child_rid.as_int ());
1627 pp_string (pp, " -> ");
1628 child_rid.dump_node_name_to_pp (pp);
1629 pp_string (pp, ";");
1630 pp_newline (pp);
1631 }
1632 }
1633
1634 /* Implementation of region::dump_child_label vfunc for map_region. */
1635
1636 void
1637 map_region::dump_child_label (const region_model &model,
1638 region_id this_rid,
1639 region_id child_rid,
1640 pretty_printer *pp) const
1641 {
1642 region::dump_child_label (model, this_rid, child_rid, pp);
1643
1644 for (map_t::iterator iter = m_map.begin ();
1645 iter != m_map.end ();
1646 ++iter)
1647 {
1648 if (child_rid == (*iter).second)
1649 {
1650 tree key = (*iter).first;
1651 if (DECL_P (key))
1652 pp_printf (pp, "%qD: ", key);
1653 else
1654 pp_printf (pp, "%qE: ", key);
1655 }
1656 }
1657 }
1658
1659 /* Look for a child region for KEY within this map_region.
1660    If it doesn't already exist, create a child region, using TYPE for
1661 its type.
1662 Return the region_id of the child (whether pre-existing, or
1663 newly-created). */
1664
1665 region_id
1666 map_region::get_or_create (region_model *model,
1667 region_id this_rid,
1668 tree key,
1669 tree type)
1670 {
1671 gcc_assert (key);
1672 gcc_assert (valid_key_p (key));
1673 region_id *slot = m_map.get (key);
1674 if (slot)
1675 return *slot;
1676 region_id child_rid = model->add_region_for_type (this_rid, type);
1677 m_map.put (key, child_rid);
1678 return child_rid;
1679 }
1680
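/* A hypothetical usage sketch (the names "frame", "frame_rid" and "parm"
   are illustrative, standing for a frame_region, its region_id, and a
   PARM_DECL):

     region_id rid
       = frame->get_or_create (model, frame_rid, parm, TREE_TYPE (parm));

   This returns the existing child region if the frame already binds PARM,
   and otherwise creates a region of the appropriate kind for its type and
   records the binding.  */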
1681 /* Get the region_id for the child region for KEY within this
1682 MAP_REGION, or NULL if there is no such child region. */
1683
1684 region_id *
1685 map_region::get (tree key)
1686 {
1687 gcc_assert (key);
1688 gcc_assert (valid_key_p (key));
1689 region_id *slot = m_map.get (key);
1690 return slot;
1691 }
1692
1693 /* Implementation of region::add_to_hash vfunc for map_region. */
1694
1695 void
1696 map_region::add_to_hash (inchash::hash &hstate) const
1697 {
1698 region::add_to_hash (hstate);
1699 // TODO
1700 }
1701
1702 /* Implementation of region::remap_region_ids vfunc for map_region. */
1703
1704 void
1705 map_region::remap_region_ids (const region_id_map &map)
1706 {
1707 region::remap_region_ids (map);
1708
1709 /* Remap the region ids within the map entries. */
1710 for (map_t::iterator iter = m_map.begin ();
1711 iter != m_map.end (); ++iter)
1712 map.update (&(*iter).second);
1713 }
1714
1715 /* Remove the binding of KEY to its child region (but not the
1716 child region itself).
1717 For use when purging unneeded SSA names. */
1718
1719 void
1720 map_region::unbind (tree key)
1721 {
1722 gcc_assert (key);
1723 gcc_assert (valid_key_p (key));
1724 m_map.remove (key);
1725 }
1726
1727 /* Look for a child region with id CHILD_RID within this map_region.
1728 If one is found, return its tree key, otherwise return NULL_TREE. */
1729
1730 tree
1731 map_region::get_tree_for_child_region (region_id child_rid) const
1732 {
1733 // TODO: do we want to store an inverse map?
1734 for (map_t::iterator iter = m_map.begin ();
1735 iter != m_map.end ();
1736 ++iter)
1737 {
1738 tree key = (*iter).first;
1739 region_id r = (*iter).second;
1740 if (r == child_rid)
1741 return key;
1742 }
1743
1744 return NULL_TREE;
1745 }
1746
1747 /* Look for a child region CHILD within this map_region.
1748 If one is found, return its tree key, otherwise return NULL_TREE. */
1749
1750 tree
1751 map_region::get_tree_for_child_region (region *child,
1752 const region_model &model) const
1753 {
1754 // TODO: do we want to store an inverse map?
1755 for (map_t::iterator iter = m_map.begin ();
1756 iter != m_map.end ();
1757 ++iter)
1758 {
1759 tree key = (*iter).first;
1760 region_id r = (*iter).second;
1761 if (model.get_region (r) == child)
1762 return key;
1763 }
1764
1765 return NULL_TREE;
1766 }
1767
1768 /* Comparator for trees to impose a deterministic ordering on
1769 T1 and T2. */
1770
1771 static int
1772 tree_cmp (const_tree t1, const_tree t2)
1773 {
1774 gcc_assert (t1);
1775 gcc_assert (t2);
1776
1777 /* Test tree codes first. */
1778 if (TREE_CODE (t1) != TREE_CODE (t2))
1779 return TREE_CODE (t1) - TREE_CODE (t2);
1780
1781 /* From this point on, we know T1 and T2 have the same tree code. */
1782
1783 if (DECL_P (t1))
1784 {
1785 if (DECL_NAME (t1) && DECL_NAME (t2))
1786 return strcmp (IDENTIFIER_POINTER (DECL_NAME (t1)),
1787 IDENTIFIER_POINTER (DECL_NAME (t2)));
1788 else
1789 {
1790 if (DECL_NAME (t1))
1791 return -1;
1792 else if (DECL_NAME (t2))
1793 return 1;
1794 else
1795 return DECL_UID (t1) - DECL_UID (t2);
1796 }
1797 }
1798
1799 switch (TREE_CODE (t1))
1800 {
1801 case SSA_NAME:
1802 {
1803 if (SSA_NAME_VAR (t1) && SSA_NAME_VAR (t2))
1804 {
1805 int var_cmp = tree_cmp (SSA_NAME_VAR (t1), SSA_NAME_VAR (t2));
1806 if (var_cmp)
1807 return var_cmp;
1808 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
1809 }
1810 else
1811 {
1812 if (SSA_NAME_VAR (t1))
1813 return -1;
1814 else if (SSA_NAME_VAR (t2))
1815 return 1;
1816 else
1817 return SSA_NAME_VERSION (t1) - SSA_NAME_VERSION (t2);
1818 }
1819 }
1820 break;
1821
1822 case INTEGER_CST:
1823 return tree_int_cst_compare (t1, t2);
1824
1825 case REAL_CST:
1826 {
1827 const real_value *rv1 = TREE_REAL_CST_PTR (t1);
1828 const real_value *rv2 = TREE_REAL_CST_PTR (t2);
1829 if (real_compare (UNORDERED_EXPR, rv1, rv2))
1830 {
1831 /* Impose an arbitrary order on NaNs relative to other NaNs
1832 and to non-NaNs. */
1833 if (int cmp_isnan = real_isnan (rv1) - real_isnan (rv2))
1834 return cmp_isnan;
1835 if (int cmp_issignaling_nan
1836 = real_issignaling_nan (rv1) - real_issignaling_nan (rv2))
1837 return cmp_issignaling_nan;
1838 return real_isneg (rv1) - real_isneg (rv2);
1839 }
1840 if (real_compare (LT_EXPR, rv1, rv2))
1841 return -1;
1842 if (real_compare (GT_EXPR, rv1, rv2))
1843 return 1;
1844 return 0;
1845 }
1846
1847 case STRING_CST:
1848 return strcmp (TREE_STRING_POINTER (t1),
1849 TREE_STRING_POINTER (t2));
1850
1851 default:
1852 gcc_unreachable ();
1853 break;
1854 }
1855
1856 gcc_unreachable ();
1857
1858 return 0;
1859 }
1860
1861 /* qsort comparator for trees to impose a deterministic ordering on
1862 P1 and P2. */
1863
1864 static int
1865 tree_cmp (const void *p1, const void *p2)
1866 {
1867 const_tree t1 = *(const_tree const *)p1;
1868 const_tree t2 = *(const_tree const *)p2;
1869
1870 return tree_cmp (t1, t2);
1871 }
1872
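/* This qsort-compatible overload lets vectors of trees be sorted directly;
   map_region::walk_for_canonicalization below uses it as
   "keys.qsort (tree_cmp);" to visit map keys in a deterministic order.  */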
1873 /* Attempt to merge MAP_REGION_A and MAP_REGION_B into MERGED_MAP_REGION,
1874 which has region_id MERGED_RID, using MERGER.
1875 Return true if the merger is possible, false otherwise. */
1876
1877 bool
1878 map_region::can_merge_p (const map_region *map_region_a,
1879 const map_region *map_region_b,
1880 map_region *merged_map_region,
1881 region_id merged_rid,
1882 model_merger *merger)
1883 {
1884 for (map_t::iterator iter = map_region_a->m_map.begin ();
1885 iter != map_region_a->m_map.end ();
1886 ++iter)
1887 {
1888 tree key_a = (*iter).first;
1889 region_id rid_a = (*iter).second;
1890
1891 if (const region_id *slot_b
1892 = const_cast<map_region *>(map_region_b)->m_map.get (key_a))
1893 {
1894 region_id rid_b = *slot_b;
1895
1896 region *child_region_a = merger->get_region_a <region> (rid_a);
1897 region *child_region_b = merger->get_region_b <region> (rid_b);
1898
1899 gcc_assert (child_region_a->get_type ()
1900 == child_region_b->get_type ());
1901
1902 gcc_assert (child_region_a->get_kind ()
1903 == child_region_b->get_kind ());
1904
1905 region_id child_merged_rid
1906 = merged_map_region->get_or_create (merger->m_merged_model,
1907 merged_rid,
1908 key_a,
1909 child_region_a->get_type ());
1910
1911 region *child_merged_region
1912 = merger->m_merged_model->get_region (child_merged_rid);
1913
1914 /* Consider values. */
1915 svalue_id child_a_sid = child_region_a->get_value_direct ();
1916 svalue_id child_b_sid = child_region_b->get_value_direct ();
1917 svalue_id child_merged_sid;
1918 if (!merger->can_merge_values_p (child_a_sid, child_b_sid,
1919 &child_merged_sid))
1920 return false;
1921 if (!child_merged_sid.null_p ())
1922 child_merged_region->set_value (*merger->m_merged_model,
1923 child_merged_rid,
1924 child_merged_sid,
1925 NULL);
1926
1927 if (map_region *map_region_a = child_region_a->dyn_cast_map_region ())
1928 {
1929 /* Recurse. */
1930 if (!can_merge_p (map_region_a,
1931 as_a <map_region *> (child_region_b),
1932 as_a <map_region *> (child_merged_region),
1933 child_merged_rid,
1934 merger))
1935 return false;
1936 }
1937
1938 }
1939 else
1940 {
1941 /* TODO: region is present in A, but absent in B. */
1942 }
1943 }
1944
1945 /* TODO: check for keys in B that aren't in A. */
1946
1947 return true;
1948 }
1949
1950
1951 /* Implementation of region::walk_for_canonicalization vfunc for
1952 map_region. */
1953
1954 void
1955 map_region::walk_for_canonicalization (canonicalization *c) const
1956 {
1957 auto_vec<tree> keys (m_map.elements ());
1958 for (map_t::iterator iter = m_map.begin ();
1959 iter != m_map.end ();
1960 ++iter)
1961 {
1962 tree key_a = (*iter).first;
1963 keys.quick_push (key_a);
1964 }
1965 keys.qsort (tree_cmp);
1966
1967 unsigned i;
1968 tree key;
1969 FOR_EACH_VEC_ELT (keys, i, key)
1970 {
1971 region_id rid = *const_cast<map_region *>(this)->m_map.get (key);
1972 c->walk_rid (rid);
1973 }
1974 }
1975
1976 /* For debugging purposes: look for a child region for a decl named
1977 IDENTIFIER (or an SSA_NAME for such a decl), returning its value,
1978 or svalue_id::null if none are found. */
1979
1980 svalue_id
1981 map_region::get_value_by_name (tree identifier,
1982 const region_model &model) const
1983 {
1984 for (map_t::iterator iter = m_map.begin ();
1985 iter != m_map.end ();
1986 ++iter)
1987 {
1988 tree key = (*iter).first;
1989 if (TREE_CODE (key) == SSA_NAME)
1990 if (SSA_NAME_VAR (key))
1991 key = SSA_NAME_VAR (key);
1992 if (DECL_P (key))
1993 if (DECL_NAME (key) == identifier)
1994 {
1995 region_id rid = (*iter).second;
1996 region *region = model.get_region (rid);
1997 return region->get_value (const_cast<region_model &>(model),
1998 false, NULL);
1999 }
2000 }
2001 return svalue_id::null ();
2002 }
2003
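/* A hypothetical debugging sketch ("frame" and "model" stand for a
   frame_region and its region_model):

     frame->get_value_by_name (get_identifier ("i"), *model)

   would return the svalue_id currently bound to a local named "i", or
   svalue_id::null () if the frame has no such binding.  */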
2004 /* class struct_or_union_region : public map_region. */
2005
2006 /* Implementation of map_region::valid_key_p vfunc for
2007 struct_or_union_region. */
2008
2009 bool
2010 struct_or_union_region::valid_key_p (tree key) const
2011 {
2012 return TREE_CODE (key) == FIELD_DECL;
2013 }
2014
2015 /* Compare the fields of this struct_or_union_region with OTHER, returning
2016 true if they are equal.
2017 For use by region::operator==. */
2018
2019 bool
2020 struct_or_union_region::compare_fields (const struct_or_union_region &other)
2021 const
2022 {
2023 return map_region::compare_fields (other);
2024 }
2025
2026 /* class struct_region : public struct_or_union_region. */
2027
2028 /* Implementation of region::clone vfunc for struct_region. */
2029
2030 region *
2031 struct_region::clone () const
2032 {
2033 return new struct_region (*this);
2034 }
2035
2036 /* Compare the fields of this struct_region with OTHER, returning true
2037 if they are equal.
2038 For use by region::operator==. */
2039
2040 bool
2041 struct_region::compare_fields (const struct_region &other) const
2042 {
2043 return struct_or_union_region::compare_fields (other);
2044 }
2045
2046 /* class union_region : public struct_or_union_region. */
2047
2048 /* Implementation of region::clone vfunc for union_region. */
2049
2050 region *
2051 union_region::clone () const
2052 {
2053 return new union_region (*this);
2054 }
2055
2056 /* Compare the fields of this union_region with OTHER, returning true
2057 if they are equal.
2058 For use by region::operator==. */
2059
2060 bool
2061 union_region::compare_fields (const union_region &other) const
2062 {
2063 return struct_or_union_region::compare_fields (other);
2064 }
2065
2066 /* class frame_region : public map_region. */
2067
2068 /* Compare the fields of this frame_region with OTHER, returning true
2069 if they are equal.
2070 For use by region::operator==. */
2071
2072 bool
2073 frame_region::compare_fields (const frame_region &other) const
2074 {
2075 if (!map_region::compare_fields (other))
2076 return false;
2077 if (m_fun != other.m_fun)
2078 return false;
2079 if (m_depth != other.m_depth)
2080 return false;
2081 return true;
2082 }
2083
2084 /* Implementation of region::clone vfunc for frame_region. */
2085
2086 region *
2087 frame_region::clone () const
2088 {
2089 return new frame_region (*this);
2090 }
2091
2092 /* Implementation of map_region::valid_key_p vfunc for frame_region. */
2093
2094 bool
2095 frame_region::valid_key_p (tree key) const
2096 {
2097 // TODO: could also check that VAR_DECLs are locals
2098 return (TREE_CODE (key) == PARM_DECL
2099 || TREE_CODE (key) == VAR_DECL
2100 || TREE_CODE (key) == SSA_NAME
2101 || TREE_CODE (key) == RESULT_DECL);
2102 }
2103
2104 /* Implementation of region::print_fields vfunc for frame_region. */
2105
2106 void
2107 frame_region::print_fields (const region_model &model,
2108 region_id this_rid,
2109 pretty_printer *pp) const
2110 {
2111 map_region::print_fields (model, this_rid, pp);
2112 pp_printf (pp, ", function: %qs, depth: %i", function_name (m_fun), m_depth);
2113 }
2114
2115 /* Implementation of region::add_to_hash vfunc for frame_region. */
2116
2117 void
2118 frame_region::add_to_hash (inchash::hash &hstate) const
2119 {
2120 map_region::add_to_hash (hstate);
2121 hstate.add_ptr (m_fun);
2122 hstate.add_int (m_depth);
2123 }
2124
2125 /* class globals_region : public scope_region. */
2126
2127 /* Compare the fields of this globals_region with OTHER, returning true
2128 if they are equal.
2129 For use by region::operator==. */
2130
2131 bool
2132 globals_region::compare_fields (const globals_region &other) const
2133 {
2134 return map_region::compare_fields (other);
2135 }
2136
2137 /* Implementation of region::clone vfunc for globals_region. */
2138
2139 region *
2140 globals_region::clone () const
2141 {
2142 return new globals_region (*this);
2143 }
2144
2145 /* Implementation of map_region::valid_key_p vfunc for globals_region. */
2146
2147 bool
2148 globals_region::valid_key_p (tree key) const
2149 {
2150 return TREE_CODE (key) == VAR_DECL;
2151 }
2152
2153 /* class code_region : public map_region. */
2154
2155 /* Compare the fields of this code_region with OTHER, returning true
2156 if they are equal.
2157 For use by region::operator==. */
2158
2159 bool
2160 code_region::compare_fields (const code_region &other) const
2161 {
2162 return map_region::compare_fields (other);
2163 }
2164
2165 /* Implementation of region::clone vfunc for code_region. */
2166
2167 region *
2168 code_region::clone () const
2169 {
2170 return new code_region (*this);
2171 }
2172
2173 /* Implementation of map_region::valid_key_p vfunc for code_region. */
2174
2175 bool
2176 code_region::valid_key_p (tree key) const
2177 {
2178 return TREE_CODE (key) == FUNCTION_DECL;
2179 }
2180
2181 /* class array_region : public region. */
2182
2183 /* array_region's copy ctor. */
2184
2185 array_region::array_region (const array_region &other)
2186 : region (other),
2187 m_map (other.m_map)
2188 {
2189 }
2190
2191 /* Get a child region for the element with index INDEX_SID. */
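
/* For example (an illustrative sketch, not from the sources): given
     int arr[10];
   an access such as arr[3] has a constant index, so a distinct child
   region keyed by 3 is returned, whereas a symbolic access such as
   arr[i] falls back to a view of the whole array using the element
   type. */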
2192
2193 region_id
2194 array_region::get_element (region_model *model,
2195 region_id this_rid,
2196 svalue_id index_sid,
2197 region_model_context *ctxt ATTRIBUTE_UNUSED)
2198 {
2199 tree element_type = TREE_TYPE (get_type ());
2200 svalue *index_sval = model->get_svalue (index_sid);
2201 if (tree cst_index = index_sval->maybe_get_constant ())
2202 {
2203 key_t key = key_from_constant (cst_index);
2204 region_id element_rid
2205 = get_or_create (model, this_rid, key, element_type);
2206 return element_rid;
2207 }
2208
2209 return model->get_or_create_view (this_rid, element_type);
2210 }
2211
2212 /* Implementation of region::clone vfunc for array_region. */
2213
2214 region *
2215 array_region::clone () const
2216 {
2217 return new array_region (*this);
2218 }
2219
2220 /* Compare the fields of this array_region with OTHER, returning true
2221 if they are equal.
2222 For use by region::operator==. */
2223
2224 bool
2225 array_region::compare_fields (const array_region &other) const
2226 {
2227 if (m_map.elements () != other.m_map.elements ())
2228 return false;
2229
2230 for (map_t::iterator iter = m_map.begin ();
2231 iter != m_map.end ();
2232 ++iter)
2233 {
2234 int key = (*iter).first;
2235 region_id e = (*iter).second;
2236 region_id *other_slot = const_cast <map_t &> (other.m_map).get (key);
2237 if (other_slot == NULL)
2238 return false;
2239 if (e != *other_slot)
2240 return false;
2241 }
2242 return true;
2243 }
2244
2245 /* Implementation of region::print_fields vfunc for array_region. */
2246
2247 void
2248 array_region::print_fields (const region_model &model,
2249 region_id this_rid,
2250 pretty_printer *pp) const
2251 {
2252 region::print_fields (model, this_rid, pp);
2253 pp_string (pp, ", array: {");
2254 for (map_t::iterator iter = m_map.begin ();
2255 iter != m_map.end ();
2256 ++iter)
2257 {
2258 if (iter != m_map.begin ())
2259 pp_string (pp, ", ");
2260 int key = (*iter).first;
2261 region_id child_rid = (*iter).second;
2262 pp_printf (pp, "[%i]: ", key);
2263 child_rid.print (pp);
2264 }
2265 pp_string (pp, "}");
2266 }
2267
2268 /* Implementation of region::dump_dot_to_pp vfunc for array_region. */
2269
2270 void
2271 array_region::dump_dot_to_pp (const region_model &model,
2272 region_id this_rid,
2273 pretty_printer *pp) const
2274 {
2275 region::dump_dot_to_pp (model, this_rid, pp);
2276 for (map_t::iterator iter = m_map.begin ();
2277 iter != m_map.end ();
2278 ++iter)
2279 {
2280 // TODO: add nodes/edges to label things
2281
2282 int key = (*iter).first;
2283 region_id child_rid = (*iter).second;
2284
2285 pp_printf (pp, "rid_label_%i [label=\"", child_rid.as_int ());
2286 pp_write_text_to_stream (pp);
2287 pp_printf (pp, "%qi", key);
2288 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
2289 pp_string (pp, "\"];");
2290 pp_newline (pp);
2291
2292 pp_printf (pp, "rid_label_%i", child_rid.as_int ());
2293 pp_string (pp, " -> ");
2294 child_rid.dump_node_name_to_pp (pp);
2295 pp_string (pp, ";");
2296 pp_newline (pp);
2297 }
2298 }
2299
2300 /* Implementation of region::dump_child_label vfunc for array_region. */
2301
2302 void
2303 array_region::dump_child_label (const region_model &model,
2304 region_id this_rid,
2305 region_id child_rid,
2306 pretty_printer *pp) const
2307 {
2308 region::dump_child_label (model, this_rid, child_rid, pp);
2309
2310 for (map_t::iterator iter = m_map.begin ();
2311 iter != m_map.end ();
2312 ++iter)
2313 {
2314 if (child_rid == (*iter).second)
2315 {
2316 int key = (*iter).first;
2317 pp_printf (pp, "[%i]: ", key);
2318 }
2319 }
2320 }
2321
2322 /* Look for a child region for KEY within this array_region.
2323 If it doesn't already exist, create a child region, using TYPE for
2324 its type.
2325 Return the region_id of the child (whether pre-existing, or
2326 newly-created). */
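
/* For instance (illustrative): two successive calls with key 3 and the
   same TYPE return the same region_id; only the first call allocates a
   new child region via add_region_for_type. */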
2327
2328 region_id
2329 array_region::get_or_create (region_model *model,
2330 region_id this_rid,
2331 key_t key,
2332 tree type)
2333 {
2334 region_id *slot = m_map.get (key);
2335 if (slot)
2336 return *slot;
2337 region_id child_rid = model->add_region_for_type (this_rid, type);
2338 m_map.put (key, child_rid);
2339 return child_rid;
2340 }
2341
2342 /* Get a pointer to the region_id of the child region for KEY within
2343 this array_region, or NULL if there is no such child region. */
2344
2345 region_id *
2346 array_region::get (key_t key)
2347 {
2348 region_id *slot = m_map.get (key);
2349 return slot;
2350 }
2351
2352 /* Implementation of region::add_to_hash vfunc for array_region. */
2353
2354 void
2355 array_region::add_to_hash (inchash::hash &hstate) const
2356 {
2357 region::add_to_hash (hstate);
2358 // TODO
2359 }
2360
2361 /* Implementation of region::remap_region_ids vfunc for array_region. */
2362
2363 void
2364 array_region::remap_region_ids (const region_id_map &map)
2365 {
2366 region::remap_region_ids (map);
2367
2368 /* Remap the region ids within the map entries. */
2369 for (map_t::iterator iter = m_map.begin ();
2370 iter != m_map.end (); ++iter)
2371 map.update (&(*iter).second);
2372 }
2373
2374 /* Look for a child region with id CHILD_RID within this array_region.
2375 If one is found, write its key to *OUT and return true,
2376 otherwise return false. */
2377
2378 bool
2379 array_region::get_key_for_child_region (region_id child_rid, key_t *out) const
2380 {
2381 // TODO: do we want to store an inverse map?
2382 for (map_t::iterator iter = m_map.begin ();
2383 iter != m_map.end ();
2384 ++iter)
2385 {
2386 key_t key = (*iter).first;
2387 region_id r = (*iter).second;
2388 if (r == child_rid)
2389 {
2390 *out = key;
2391 return true;
2392 }
2393 }
2394
2395 return false;
2396 }
2397
2398 /* qsort comparator for array_region's keys. */
2399
2400 int
2401 array_region::key_cmp (const void *p1, const void *p2)
2402 {
2403 key_t i1 = *(const key_t *)p1;
2404 key_t i2 = *(const key_t *)p2;
2405
2406 if (i1 > i2)
2407 return 1;
2408 else if (i1 < i2)
2409 return -1;
2410 else
2411 return 0;
2412 }
2413
2414 /* Implementation of region::walk_for_canonicalization vfunc for
2415 array_region. */
2416
2417 void
2418 array_region::walk_for_canonicalization (canonicalization *c) const
2419 {
2420 auto_vec<int> keys (m_map.elements ());
2421 for (map_t::iterator iter = m_map.begin ();
2422 iter != m_map.end ();
2423 ++iter)
2424 {
2425 int key_a = (*iter).first;
2426 keys.quick_push (key_a);
2427 }
2428 keys.qsort (key_cmp);
2429
2430 unsigned i;
2431 int key;
2432 FOR_EACH_VEC_ELT (keys, i, key)
2433 {
2434 region_id rid = *const_cast<array_region *>(this)->m_map.get (key);
2435 c->walk_rid (rid);
2436 }
2437 }
2438
2439 /* Convert constant CST into an array_region::key_t. */
2440
2441 array_region::key_t
2442 array_region::key_from_constant (tree cst)
2443 {
2444 gcc_assert (CONSTANT_CLASS_P (cst));
2445 wide_int w = wi::to_wide (cst);
2446 key_t result = w.to_shwi ();
2447 return result;
2448 }
2449
2450 /* class function_region : public map_region. */
2451
2452 /* Compare the fields of this function_region with OTHER, returning true
2453 if they are equal.
2454 For use by region::operator==. */
2455
2456 bool
2457 function_region::compare_fields (const function_region &other) const
2458 {
2459 return map_region::compare_fields (other);
2460 }
2461
2462 /* Implementation of region::clone vfunc for function_region. */
2463
2464 region *
2465 function_region::clone () const
2466 {
2467 return new function_region (*this);
2468 }
2469
2470 /* Implementation of map_region::valid_key_p vfunc for function_region. */
2471
2472 bool
2473 function_region::valid_key_p (tree key) const
2474 {
2475 return TREE_CODE (key) == LABEL_DECL;
2476 }
2477
2478 /* class stack_region : public region. */
2479
2480 /* stack_region's copy ctor. */
2481
2482 stack_region::stack_region (const stack_region &other)
2483 : region (other),
2484 m_frame_rids (other.m_frame_rids.length ())
2485 {
2486 int i;
2487 region_id *frame_rid;
2488 FOR_EACH_VEC_ELT (other.m_frame_rids, i, frame_rid)
2489 m_frame_rids.quick_push (*frame_rid);
2490 }
2491
2492 /* Compare the fields of this stack_region with OTHER, returning true
2493 if they are equal.
2494 For use by region::operator==. */
2495
2496 bool
2497 stack_region::compare_fields (const stack_region &other) const
2498 {
2499 if (m_frame_rids.length () != other.m_frame_rids.length ())
2500 return false;
2501
2502 int i;
2503 region_id *frame_rid;
2504 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2505 if (m_frame_rids[i] != other.m_frame_rids[i])
2506 return false;
2507
2508 return true;
2509 }
2510
2511 /* Implementation of region::clone vfunc for stack_region. */
2512
2513 region *
2514 stack_region::clone () const
2515 {
2516 return new stack_region (*this);
2517 }
2518
2519 /* Implementation of region::print_fields vfunc for stack_region. */
2520
2521 void
2522 stack_region::print_fields (const region_model &model,
2523 region_id this_rid,
2524 pretty_printer *pp) const
2525 {
2526 region::print_fields (model, this_rid, pp);
2527 // TODO
2528 }
2529
2530 /* Implementation of region::dump_child_label vfunc for stack_region. */
2531
2532 void
2533 stack_region::dump_child_label (const region_model &model,
2534 region_id this_rid ATTRIBUTE_UNUSED,
2535 region_id child_rid,
2536 pretty_printer *pp) const
2537 {
2538 function *fun = model.get_region<frame_region> (child_rid)->get_function ();
2539 pp_printf (pp, "frame for %qs: ", function_name (fun));
2540 }
2541
2542 /* Push FRAME_RID (for a frame_region) onto this stack. */
2543
2544 void
2545 stack_region::push_frame (region_id frame_rid)
2546 {
2547 m_frame_rids.safe_push (frame_rid);
2548 }
2549
2550 /* Get the region_id of the top-most frame in this stack, if any. */
2551
2552 region_id
2553 stack_region::get_current_frame_id () const
2554 {
2555 if (m_frame_rids.length () > 0)
2556 return m_frame_rids[m_frame_rids.length () - 1];
2557 else
2558 return region_id::null ();
2559 }
2560
2561 /* Pop the topmost frame_region from this stack.
2562
2563 Purge the frame region and all its descendant regions.
2564 Convert any pointers that point into such regions into
2565 POISON_KIND_POPPED_STACK svalues.
2566
2567 Return the ID of any return value from the frame.
2568
2569 If PURGE, then purge all unused svalues, with the exception of any
2570 return value for the frame, which is temporarily
2571 preserved in case no regions reference it, so it can
2572 be written into a region in the caller.
2573
2574 Accumulate stats on purged entities into STATS. */
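
/* Illustrative sketch (invented source, not from the testsuite): for
     void inner (void) { int local = 42; }
     void outer (void) { inner (); }
   popping inner's frame deletes the regions for the frame and for
   "local"; any svalue still pointing into them is converted to a
   POISON_KIND_POPPED_STACK value rather than being left dangling. */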
2575
2576 svalue_id
2577 stack_region::pop_frame (region_model *model, bool purge, purge_stats *stats,
2578 region_model_context *ctxt)
2579 {
2580 gcc_assert (m_frame_rids.length () > 0);
2581
2582 region_id frame_rid = get_current_frame_id ();
2583 frame_region *frame = model->get_region<frame_region> (frame_rid);
2584
2585 /* Evaluate the result, within the callee frame. */
2586 svalue_id result_sid;
2587 tree fndecl = frame->get_function ()->decl;
2588 tree result = DECL_RESULT (fndecl);
2589 if (result && TREE_TYPE (result) != void_type_node)
2590 result_sid = model->get_rvalue (result, ctxt);
2591
2592 /* Pop the frame RID. */
2593 m_frame_rids.pop ();
2594
2595 model->delete_region_and_descendents (frame_rid,
2596 POISON_KIND_POPPED_STACK,
2597 stats,
2598 ctxt ? ctxt->get_logger () : NULL);
2599
2600 /* Delete unused svalues, but don't delete the return value. */
2601 if (purge)
2602 model->purge_unused_svalues (stats, ctxt, &result_sid);
2603
2604 model->validate ();
2605
2606 return result_sid;
2607 }
2608
2609 /* Implementation of region::add_to_hash vfunc for stack_region. */
2610
2611 void
2612 stack_region::add_to_hash (inchash::hash &hstate) const
2613 {
2614 region::add_to_hash (hstate);
2615
2616 int i;
2617 region_id *frame_rid;
2618 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2619 inchash::add (*frame_rid, hstate);
2620 }
2621
2622 /* Implementation of region::remap_region_ids vfunc for stack_region. */
2623
2624 void
2625 stack_region::remap_region_ids (const region_id_map &map)
2626 {
2627 region::remap_region_ids (map);
2628 int i;
2629 region_id *frame_rid;
2630 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2631 map.update (&m_frame_rids[i]);
2632 }
2633
2634 /* Attempt to merge STACK_REGION_A and STACK_REGION_B using MERGER.
2635 Return true if the merger is possible, false otherwise. */
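
/* For example (illustrative): a model paused inside "foo" called from
   "main" can only be merged with another model whose stack is also
   [main, foo]; a differing number of frames, or differing functions at
   any frame, cause the merge to be rejected. */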
2636
2637 bool
2638 stack_region::can_merge_p (const stack_region *stack_region_a,
2639 const stack_region *stack_region_b,
2640 model_merger *merger)
2641 {
2642 if (stack_region_a->get_num_frames ()
2643 != stack_region_b->get_num_frames ())
2644 return false;
2645
2646 region_model *merged_model = merger->m_merged_model;
2647
2648 region_id rid_merged_stack
2649 = merged_model->get_root_region ()->ensure_stack_region (merged_model);
2650
2651 stack_region *merged_stack
2652 = merged_model->get_region <stack_region> (rid_merged_stack);
2653
2654 /* First, create all frames in the merged model, without populating them.
2655 The merging code assumes that all frames in the merged model already exist,
2656 so we have to do this first to handle the case in which a local in an
2657 older frame points at a local in a more recent frame. */
2658 for (unsigned i = 0; i < stack_region_a->get_num_frames (); i++)
2659 {
2660 region_id rid_a = stack_region_a->get_frame_rid (i);
2661 frame_region *frame_a = merger->get_region_a <frame_region> (rid_a);
2662
2663 region_id rid_b = stack_region_b->get_frame_rid (i);
2664 frame_region *frame_b = merger->get_region_b <frame_region> (rid_b);
2665
2666 if (frame_a->get_function () != frame_b->get_function ())
2667 return false;
2668
2669 frame_region *merged_frame = new frame_region (rid_merged_stack,
2670 frame_a->get_function (),
2671 frame_a->get_depth ());
2672 region_id rid_merged_frame = merged_model->add_region (merged_frame);
2673 merged_stack->push_frame (rid_merged_frame);
2674 }
2675
2676 /* Now populate the frames we created. */
2677 for (unsigned i = 0; i < stack_region_a->get_num_frames (); i++)
2678 {
2679 region_id rid_a = stack_region_a->get_frame_rid (i);
2680 frame_region *frame_a = merger->get_region_a <frame_region> (rid_a);
2681
2682 region_id rid_b = stack_region_b->get_frame_rid (i);
2683 frame_region *frame_b = merger->get_region_b <frame_region> (rid_b);
2684
2685 region_id rid_merged_frame = merged_stack->get_frame_rid (i);
2686 frame_region *merged_frame
2687 = merged_model->get_region <frame_region> (rid_merged_frame);
2688 if (!map_region::can_merge_p (frame_a, frame_b,
2689 merged_frame, rid_merged_frame,
2690 merger))
2691 return false;
2692 }
2693
2694 return true;
2695 }
2696
2697 /* Implementation of region::walk_for_canonicalization vfunc for
2698 stack_region. */
2699
2700 void
2701 stack_region::walk_for_canonicalization (canonicalization *c) const
2702 {
2703 int i;
2704 region_id *frame_rid;
2705 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2706 c->walk_rid (*frame_rid);
2707 }
2708
2709 /* For debugging purposes: look for a grandchild region within one of
2710 the child frame regions, where the grandchild is for a decl named
2711 IDENTIFIER (or an SSA_NAME for such a decl):
2712
2713 stack_region
2714 `-frame_region
2715 `-region for decl named IDENTIFIER
2716
2717 returning its value, or svalue_id::null if none are found. */
2718
2719 svalue_id
2720 stack_region::get_value_by_name (tree identifier,
2721 const region_model &model) const
2722 {
2723 int i;
2724 region_id *frame_rid;
2725 FOR_EACH_VEC_ELT (m_frame_rids, i, frame_rid)
2726 {
2727 frame_region *frame = model.get_region<frame_region> (*frame_rid);
2728 svalue_id sid = frame->get_value_by_name (identifier, model);
2729 if (!sid.null_p ())
2730 return sid;
2731 }
2732
2733 return svalue_id::null ();
2734 }
2735
2736 /* class heap_region : public region. */
2737
2738 /* heap_region's copy ctor. */
2739
2740 heap_region::heap_region (const heap_region &other)
2741 : region (other)
2742 {
2743 }
2744
2745 /* Compare the fields of this heap_region with OTHER, returning true
2746 if they are equal.
2747 For use by region::operator==. */
2748
2749 bool
2750 heap_region::compare_fields (const heap_region &) const
2751 {
2752 /* Empty. */
2753 return true;
2754 }
2755
2756 /* Implementation of region::clone vfunc for heap_region. */
2757
2758 region *
2759 heap_region::clone () const
2760 {
2761 return new heap_region (*this);
2762 }
2763
2764 /* Implementation of region::walk_for_canonicalization vfunc for
2765 heap_region. */
2766
2767 void
2768 heap_region::walk_for_canonicalization (canonicalization *) const
2769 {
2770 /* Empty. */
2771 }
2772
2773 /* class root_region : public region. */
2774
2775 /* root_region's default ctor. */
2776
2777 root_region::root_region ()
2778 : region (region_id::null (),
2779 svalue_id::null (),
2780 NULL_TREE)
2781 {
2782 }
2783
2784 /* root_region's copy ctor. */
2785
2786 root_region::root_region (const root_region &other)
2787 : region (other),
2788 m_stack_rid (other.m_stack_rid),
2789 m_globals_rid (other.m_globals_rid),
2790 m_code_rid (other.m_code_rid),
2791 m_heap_rid (other.m_heap_rid)
2792 {
2793 }
2794
2795 /* Compare the fields of this root_region with OTHER, returning true
2796 if they are equal.
2797 For use by region::operator==. */
2798
2799 bool
2800 root_region::compare_fields (const root_region &other) const
2801 {
2802 if (m_stack_rid != other.m_stack_rid)
2803 return false;
2804 if (m_globals_rid != other.m_globals_rid)
2805 return false;
2806 if (m_code_rid != other.m_code_rid)
2807 return false;
2808 if (m_heap_rid != other.m_heap_rid)
2809 return false;
2810 return true;
2811 }
2812
2813 /* Implementation of region::clone vfunc for root_region. */
2814
2815 region *
2816 root_region::clone () const
2817 {
2818 return new root_region (*this);
2819 }
2820
2821 /* Implementation of region::print_fields vfunc for root_region. */
2822
2823 void
2824 root_region::print_fields (const region_model &model,
2825 region_id this_rid,
2826 pretty_printer *pp) const
2827 {
2828 region::print_fields (model, this_rid, pp);
2829 // TODO
2830 }
2831
2832 /* Implementation of region::dump_child_label vfunc for root_region. */
2833
2834 void
2835 root_region::dump_child_label (const region_model &model ATTRIBUTE_UNUSED,
2836 region_id this_rid ATTRIBUTE_UNUSED,
2837 region_id child_rid,
2838 pretty_printer *pp) const
2839 {
2840 if (child_rid == m_stack_rid)
2841 pp_printf (pp, "stack: ");
2842 else if (child_rid == m_globals_rid)
2843 pp_printf (pp, "globals: ");
2844 else if (child_rid == m_code_rid)
2845 pp_printf (pp, "code: ");
2846 else if (child_rid == m_heap_rid)
2847 pp_printf (pp, "heap: ");
2848 }
2849
2850 /* Create a new frame_region for a call to FUN and push it onto
2851 the stack.
2852
2853 If ARG_SIDS is non-NULL, use it to populate the parameters
2854 in the new frame.
2855 Otherwise, populate them with unknown values.
2856
2857 Return the region_id of the new frame. */
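
/* Illustrative sketch (invented callee): pushing a frame for
     int add (int a, int b) { return a + b; }
   with ARG_SIDS == {sid_of_3, sid_of_4} creates regions for "a" and "b"
   within the new frame and binds them (and their default SSA names, if
   any) to those svalues; with ARG_SIDS == NULL they are instead given
   fresh "unknown" values. */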
2858
2859 region_id
2860 root_region::push_frame (region_model *model, function *fun,
2861 vec<svalue_id> *arg_sids,
2862 region_model_context *ctxt)
2863 {
2864 gcc_assert (fun);
2865 /* arg_sids can be NULL. */
2866
2867 ensure_stack_region (model);
2868 stack_region *stack = model->get_region <stack_region> (m_stack_rid);
2869
2870 frame_region *region = new frame_region (m_stack_rid, fun,
2871 stack->get_num_frames ());
2872 region_id frame_rid = model->add_region (region);
2873
2874 // TODO: unify these cases by building a vec of unknown?
2875
2876 if (arg_sids)
2877 {
2878 /* Arguments supplied from a caller frame. */
2879
2880 tree fndecl = fun->decl;
2881 unsigned idx = 0;
2882 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2883 iter_parm = DECL_CHAIN (iter_parm), ++idx)
2884 {
2885 /* If there's a mismatching declaration, the call stmt might
2886 not have enough args. Handle this case by leaving the
2887 rest of the params as uninitialized. */
2888 if (idx >= arg_sids->length ())
2889 break;
2890 svalue_id arg_sid = (*arg_sids)[idx];
2891 region_id parm_rid
2892 = region->get_or_create (model, frame_rid, iter_parm,
2893 TREE_TYPE (iter_parm));
2894 model->set_value (parm_rid, arg_sid, ctxt);
2895
2896 /* Also do it for the default SSA name (sharing the same argument
2897 value). */
2898 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2899 if (parm_default_ssa)
2900 {
2901 region_id defssa_rid
2902 = region->get_or_create (model, frame_rid, parm_default_ssa,
2903 TREE_TYPE (iter_parm));
2904 model->set_value (defssa_rid, arg_sid, ctxt);
2905 }
2906 }
2907 }
2908 else
2909 {
2910 /* No known arguments (a top-level call within the analysis). */
2911
2912 /* Params have a defined, unknown value; they should not inherit
2913 from the poisoned uninit value. */
2914 tree fndecl = fun->decl;
2915 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2916 iter_parm = DECL_CHAIN (iter_parm))
2917 {
2918 region_id parm_rid
2919 = region->get_or_create (model, frame_rid, iter_parm,
2920 TREE_TYPE (iter_parm));
2921 svalue_id parm_sid
2922 = model->set_to_new_unknown_value (parm_rid, TREE_TYPE (iter_parm),
2923 ctxt);
2924
2925 /* Also do it for the default SSA name (sharing the same unknown
2926 value). */
2927 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2928 if (parm_default_ssa)
2929 {
2930 region_id defssa_rid
2931 = region->get_or_create (model, frame_rid, parm_default_ssa,
2932 TREE_TYPE (iter_parm));
2933 model->get_region (defssa_rid)->set_value (*model, defssa_rid,
2934 parm_sid, ctxt);
2935 }
2936 }
2937 }
2938
2939 stack->push_frame (frame_rid);
2940
2941 return frame_rid;
2942 }
2943
2944 /* Get the region_id of the top-most frame in this root_region's stack,
2945 if any. */
2946
2947 region_id
2948 root_region::get_current_frame_id (const region_model &model) const
2949 {
2950 stack_region *stack = model.get_region <stack_region> (m_stack_rid);
2951 if (stack)
2952 return stack->get_current_frame_id ();
2953 else
2954 return region_id::null ();
2955 }
2956
2957 /* Pop the topmost frame_region from this root_region's stack;
2958 see the comment for stack_region::pop_frame. */
2959
2960 svalue_id
2961 root_region::pop_frame (region_model *model, bool purge, purge_stats *out,
2962 region_model_context *ctxt)
2963 {
2964 stack_region *stack = model->get_region <stack_region> (m_stack_rid);
2965 return stack->pop_frame (model, purge, out, ctxt);
2966 }
2967
2968 /* Return the region_id of the stack region, creating it if it doesn't
2969 already exist. */
2970
2971 region_id
2972 root_region::ensure_stack_region (region_model *model)
2973 {
2974 if (m_stack_rid.null_p ())
2975 {
2976 svalue_id uninit_sid
2977 = model->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT,
2978 NULL_TREE));
2979 m_stack_rid
2980 = model->add_region (new stack_region (model->get_root_rid (),
2981 uninit_sid));
2982 }
2983 return m_stack_rid;
2984 }
2985
2986 /* Return the stack region (which could be NULL). */
2987
2988 stack_region *
2989 root_region::get_stack_region (const region_model *model) const
2990 {
2991 return model->get_region <stack_region> (m_stack_rid);
2992 }
2993
2994 /* Return the region_id of the globals region, creating it if it doesn't
2995 already exist. */
2996
2997 region_id
2998 root_region::ensure_globals_region (region_model *model)
2999 {
3000 if (m_globals_rid.null_p ())
3001 m_globals_rid
3002 = model->add_region (new globals_region (model->get_root_rid ()));
3003 return m_globals_rid;
3004 }
3005
3006 /* Return the code region (which could be NULL). */
3007
3008 code_region *
3009 root_region::get_code_region (const region_model *model) const
3010 {
3011 return model->get_region <code_region> (m_code_rid);
3012 }
3013
3014 /* Return the region_id of the code region, creating it if it doesn't
3015 already exist. */
3016
3017 region_id
3018 root_region::ensure_code_region (region_model *model)
3019 {
3020 if (m_code_rid.null_p ())
3021 m_code_rid
3022 = model->add_region (new code_region (model->get_root_rid ()));
3023 return m_code_rid;
3024 }
3025
3026 /* Return the globals region (which could be NULL). */
3027
3028 globals_region *
3029 root_region::get_globals_region (const region_model *model) const
3030 {
3031 return model->get_region <globals_region> (m_globals_rid);
3032 }
3033
3034 /* Return the region_id of the heap region, creating it if it doesn't
3035 already exist. */
3036
3037 region_id
3038 root_region::ensure_heap_region (region_model *model)
3039 {
3040 if (m_heap_rid.null_p ())
3041 {
3042 svalue_id uninit_sid
3043 = model->add_svalue (new poisoned_svalue (POISON_KIND_UNINIT,
3044 NULL_TREE));
3045 m_heap_rid
3046 = model->add_region (new heap_region (model->get_root_rid (),
3047 uninit_sid));
3048 }
3049 return m_heap_rid;
3050 }
3051
3052 /* Return the heap region (which could be NULL). */
3053
3054 heap_region *
3055 root_region::get_heap_region (const region_model *model) const
3056 {
3057 return model->get_region <heap_region> (m_heap_rid);
3058 }
3059
3060 /* Implementation of region::remap_region_ids vfunc for root_region. */
3061
3062 void
3063 root_region::remap_region_ids (const region_id_map &map)
3064 {
3065 map.update (&m_stack_rid);
3066 map.update (&m_globals_rid);
3067 map.update (&m_code_rid);
3068 map.update (&m_heap_rid);
3069 }
3070
3071 /* Attempt to merge ROOT_REGION_A and ROOT_REGION_B into
3072 MERGED_ROOT_REGION using MERGER.
3073 Return true if the merger is possible, false otherwise. */
3074
3075 bool
3076 root_region::can_merge_p (const root_region *root_region_a,
3077 const root_region *root_region_b,
3078 root_region *merged_root_region,
3079 model_merger *merger)
3080 {
3081 /* We can only merge if the stacks are sufficiently similar. */
3082 stack_region *stack_a = root_region_a->get_stack_region (merger->m_model_a);
3083 stack_region *stack_b = root_region_b->get_stack_region (merger->m_model_b);
3084 if (stack_a && stack_b)
3085 {
3086 /* If the two models both have a stack, attempt to merge them. */
3087 merged_root_region->ensure_stack_region (merger->m_merged_model);
3088 if (!stack_region::can_merge_p (stack_a, stack_b, merger))
3089 return false;
3090 }
3091 else if (stack_a || stack_b)
3092 /* Don't attempt to merge if one model has a stack and the other
3093 doesn't. */
3094 return false;
3095
3096 map_region *globals_a = root_region_a->get_globals_region (merger->m_model_a);
3097 map_region *globals_b = root_region_b->get_globals_region (merger->m_model_b);
3098 if (globals_a && globals_b)
3099 {
3100 /* If both models have globals regions, attempt to merge them. */
3101 region_id merged_globals_rid
3102 = merged_root_region->ensure_globals_region (merger->m_merged_model);
3103 map_region *merged_globals
3104 = merged_root_region->get_globals_region (merger->m_merged_model);
3105 if (!map_region::can_merge_p (globals_a, globals_b,
3106 merged_globals, merged_globals_rid,
3107 merger))
3108 return false;
3109 }
3110 /* otherwise, merge as "no globals". */
3111
3112 map_region *code_a = root_region_a->get_code_region (merger->m_model_a);
3113 map_region *code_b = root_region_b->get_code_region (merger->m_model_b);
3114 if (code_a && code_b)
3115 {
3116 /* If both models have code regions, attempt to merge them. */
3117 region_id merged_code_rid
3118 = merged_root_region->ensure_code_region (merger->m_merged_model);
3119 map_region *merged_code
3120 = merged_root_region->get_code_region (merger->m_merged_model);
3121 if (!map_region::can_merge_p (code_a, code_b,
3122 merged_code, merged_code_rid,
3123 merger))
3124 return false;
3125 }
3126 /* otherwise, merge as "no code". */
3127
3128 heap_region *heap_a = root_region_a->get_heap_region (merger->m_model_a);
3129 heap_region *heap_b = root_region_b->get_heap_region (merger->m_model_b);
3130 if (heap_a && heap_b)
3131 {
3132 /* If both have a heap, create a "merged" heap.
3133 Actually merging the heap contents happens via the region_svalue
3134 instances, as needed, when seeing pairs of region_svalue instances. */
3135 merged_root_region->ensure_heap_region (merger->m_merged_model);
3136 }
3137 /* otherwise, merge as "no heap". */
3138
3139 return true;
3140 }
3141
3142 /* Implementation of region::add_to_hash vfunc for root_region. */
3143
3144 void
3145 root_region::add_to_hash (inchash::hash &hstate) const
3146 {
3147 region::add_to_hash (hstate);
3148 inchash::add (m_stack_rid, hstate);
3149 inchash::add (m_globals_rid, hstate);
3150 inchash::add (m_code_rid, hstate);
3151 inchash::add (m_heap_rid, hstate);
3152 }
3153
3154 /* Implementation of region::walk_for_canonicalization vfunc for
3155 root_region. */
3156
3157 void
3158 root_region::walk_for_canonicalization (canonicalization *c) const
3159 {
3160 c->walk_rid (m_stack_rid);
3161 c->walk_rid (m_globals_rid);
3162 c->walk_rid (m_code_rid);
3163 c->walk_rid (m_heap_rid);
3164 }
3165
3166 /* For debugging purposes: look for a descendant region for a local
3167 or global decl named IDENTIFIER (or an SSA_NAME for such a decl),
3168 returning its value, or svalue_id::null if none are found. */
3169
3170 svalue_id
3171 root_region::get_value_by_name (tree identifier,
3172 const region_model &model) const
3173 {
3174 if (stack_region *stack = get_stack_region (&model))
3175 {
3176 svalue_id sid = stack->get_value_by_name (identifier, model);
3177 if (!sid.null_p ())
3178 return sid;
3179 }
3180 if (map_region *globals = get_globals_region (&model))
3181 {
3182 svalue_id sid = globals->get_value_by_name (identifier, model);
3183 if (!sid.null_p ())
3184 return sid;
3185 }
3186 return svalue_id::null ();
3187 }
3188
3189 /* class symbolic_region : public region. */
3190
3191 /* symbolic_region's copy ctor. */
3192
3193 symbolic_region::symbolic_region (const symbolic_region &other)
3194 : region (other),
3195 m_possibly_null (other.m_possibly_null)
3196 {
3197 }
3198
3199 /* Compare the fields of this symbolic_region with OTHER, returning true
3200 if they are equal.
3201 For use by region::operator==. */
3202
3203 bool
3204 symbolic_region::compare_fields (const symbolic_region &other) const
3205 {
3206 return m_possibly_null == other.m_possibly_null;
3207 }
3208
3209 /* Implementation of region::clone vfunc for symbolic_region. */
3210
3211 region *
3212 symbolic_region::clone () const
3213 {
3214 return new symbolic_region (*this);
3215 }
3216
3217 /* Implementation of region::walk_for_canonicalization vfunc for
3218 symbolic_region. */
3219
3220 void
3221 symbolic_region::walk_for_canonicalization (canonicalization *) const
3222 {
3223 /* Empty. */
3224 }
3225
3226 /* class region_model. */
3227
3228 /* region_model's default ctor. */
3229
3230 region_model::region_model ()
3231 {
3232 m_root_rid = add_region (new root_region ());
3233 m_constraints = new impl_constraint_manager (this);
3234 // TODO
3235 }
3236
3237 /* region_model's copy ctor. */
3238
3239 region_model::region_model (const region_model &other)
3240 : m_svalues (other.m_svalues.length ()),
3241 m_regions (other.m_regions.length ()),
3242 m_root_rid (other.m_root_rid)
3243 {
3244 /* Clone the svalues and regions. */
3245 int i;
3246
3247 svalue *svalue;
3248 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3249 m_svalues.quick_push (svalue->clone ());
3250
3251 region *region;
3252 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3253 m_regions.quick_push (region->clone ());
3254
3255 m_constraints = other.m_constraints->clone (this);
3256 }
3257
3258 /* region_model's dtor. */
3259
3260 region_model::~region_model ()
3261 {
3262 delete m_constraints;
3263 }
3264
3265 /* region_model's assignment operator. */
3266
3267 region_model &
3268 region_model::operator= (const region_model &other)
3269 {
3270 unsigned i;
3271 svalue *svalue;
3272 region *region;
3273
3274 /* Delete existing content. */
3275 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3276 delete svalue;
3277 m_svalues.truncate (0);
3278
3279 FOR_EACH_VEC_ELT (m_regions, i, region)
3280 delete region;
3281 m_regions.truncate (0);
3282
3283 delete m_constraints;
3284
3285 /* Clone the svalues and regions. */
3286 m_svalues.reserve (other.m_svalues.length (), true);
3287 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3288 m_svalues.quick_push (svalue->clone ());
3289
3290 m_regions.reserve (other.m_regions.length (), true);
3291 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3292 m_regions.quick_push (region->clone ());
3293
3294 m_root_rid = other.m_root_rid;
3295
3296 m_constraints = other.m_constraints->clone (this);
3297
3298 return *this;
3299 }
3300
3301 /* Equality operator for region_model.
3302
3303 Amongst other things, this directly compares the svalue and region
3304 vectors, so for the comparison to be meaningful both this and OTHER
3305 should have been canonicalized. */
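
/* For example (illustrative): two models that bind the same values to
   the same regions, but that happened to create their svalues in a
   different order, will compare unequal here unless canonicalize () has
   first been called on both. */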
3306
3307 bool
3308 region_model::operator== (const region_model &other) const
3309 {
3310 if (m_root_rid != other.m_root_rid)
3311 return false;
3312
3313 if (m_svalues.length () != other.m_svalues.length ())
3314 return false;
3315
3316 if (m_regions.length () != other.m_regions.length ())
3317 return false;
3318
3319 if (*m_constraints != *other.m_constraints)
3320 return false;
3321
3322 unsigned i;
3323 svalue *svalue;
3324 FOR_EACH_VEC_ELT (other.m_svalues, i, svalue)
3325 if (!(*m_svalues[i] == *other.m_svalues[i]))
3326 return false;
3327
3328 region *region;
3329 FOR_EACH_VEC_ELT (other.m_regions, i, region)
3330 if (!(*m_regions[i] == *other.m_regions[i]))
3331 return false;
3332
3333 gcc_checking_assert (hash () == other.hash ());
3334
3335 return true;
3336 }
3337
3338 /* Generate a hash value for this region_model. */
3339
3340 hashval_t
3341 region_model::hash () const
3342 {
3343 hashval_t result = 0;
3344 int i;
3345
3346 svalue *svalue;
3347 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3348 result ^= svalue->hash ();
3349
3350 region *region;
3351 FOR_EACH_VEC_ELT (m_regions, i, region)
3352 result ^= region->hash ();
3353
3354 result ^= m_constraints->hash ();
3355
3356 return result;
3357 }
3358
3359 /* Print an all-on-one-line representation of this region_model to PP,
3360 which must support %E for trees. */
3361
3362 void
3363 region_model::print (pretty_printer *pp) const
3364 {
3365 int i;
3366
3367 pp_string (pp, "svalues: [");
3368 svalue *svalue;
3369 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3370 {
3371 if (i > 0)
3372 pp_string (pp, ", ");
3373 print_svalue (svalue_id::from_int (i), pp);
3374 }
3375
3376 pp_string (pp, "], regions: [");
3377
3378 region *region;
3379 FOR_EACH_VEC_ELT (m_regions, i, region)
3380 {
3381 if (i > 0)
3382 pp_string (pp, ", ");
3383 region->print (*this, region_id::from_int (i), pp);
3384 }
3385
3386 pp_string (pp, "], constraints: ");
3387
3388 m_constraints->print (pp);
3389 }
3390
3391 /* Print the svalue with id SID to PP. */
3392
3393 void
3394 region_model::print_svalue (svalue_id sid, pretty_printer *pp) const
3395 {
3396 get_svalue (sid)->print (*this, sid, pp);
3397 }
3398
3399 /* Dump a .dot representation of this region_model to PP, showing
3400 the values and the hierarchy of regions. */
3401
3402 void
3403 region_model::dump_dot_to_pp (pretty_printer *pp) const
3404 {
3405 graphviz_out gv (pp);
3406
3407 pp_string (pp, "digraph \"");
3408 pp_write_text_to_stream (pp);
3409 pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/false);
3410 pp_string (pp, "\" {\n");
3411
3412 gv.indent ();
3413
3414 pp_string (pp, "overlap=false;\n");
3415 pp_string (pp, "compound=true;\n");
3416
3417 int i;
3418
3419 svalue *svalue;
3420 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3421 svalue->dump_dot_to_pp (*this, svalue_id::from_int (i), pp);
3422
3423 region *region;
3424 FOR_EACH_VEC_ELT (m_regions, i, region)
3425 region->dump_dot_to_pp (*this, region_id::from_int (i), pp);
3426
3427 /* TODO: constraints. */
3428
3429 /* Terminate "digraph" */
3430 gv.outdent ();
3431 pp_string (pp, "}");
3432 pp_newline (pp);
3433 }
3434
3435 /* Dump a .dot representation of this region_model to FP. */
3436
3437 void
3438 region_model::dump_dot_to_file (FILE *fp) const
3439 {
3440 pretty_printer pp;
3441 pp_format_decoder (&pp) = default_tree_printer;
3442 pp.buffer->stream = fp;
3443 dump_dot_to_pp (&pp);
3444 pp_flush (&pp);
3445 }
3446
3447 /* Dump a .dot representation of this region_model to PATH. */
3448
3449 void
3450 region_model::dump_dot (const char *path) const
3451 {
3452 FILE *fp = fopen (path, "w");
3453 dump_dot_to_file (fp);
3454 fclose (fp);
3455 }
3456
3457 /* Dump a multiline representation of this model to PP, showing the
3458 region hierarchy, the svalues, and any constraints.
3459
3460 If SUMMARIZE is true, show only the most pertinent information,
3461 in a form that attempts to be less verbose.
3462 Otherwise, show all information. */
3463
3464 void
3465 region_model::dump_to_pp (pretty_printer *pp, bool summarize) const
3466 {
3467 if (summarize)
3468 {
3469 bool is_first = true;
3470 region_id frame_id = get_current_frame_id ();
3471 frame_region *frame = get_region <frame_region> (frame_id);
3472 if (frame)
3473 dump_summary_of_map (pp, frame, &is_first);
3474
3475 region_id globals_id = get_globals_region_id ();
3476 map_region *globals = get_region <map_region> (globals_id);
3477 if (globals)
3478 dump_summary_of_map (pp, globals, &is_first);
3479
3480 unsigned i;
3481
3482 equiv_class *ec;
3483 FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec)
3484 {
3485 for (unsigned j = 0; j < ec->m_vars.length (); j++)
3486 {
3487 svalue_id lhs_sid = ec->m_vars[j];
3488 tree lhs_tree = get_representative_tree (lhs_sid);
3489 if (lhs_tree == NULL_TREE)
3490 continue;
3491 for (unsigned k = j + 1; k < ec->m_vars.length (); k++)
3492 {
3493 svalue_id rhs_sid = ec->m_vars[k];
3494 tree rhs_tree = get_representative_tree (rhs_sid);
3495 if (rhs_tree
3496 && !(CONSTANT_CLASS_P (lhs_tree)
3497 && CONSTANT_CLASS_P (rhs_tree)))
3498 {
3499 dump_separator (pp, &is_first);
3500 dump_tree (pp, lhs_tree);
3501 pp_string (pp, " == ");
3502 dump_tree (pp, rhs_tree);
3503 }
3504 }
3505 }
3506 }
3507
3508 constraint *c;
3509 FOR_EACH_VEC_ELT (m_constraints->m_constraints, i, c)
3510 {
3511 const equiv_class &lhs = c->m_lhs.get_obj (*m_constraints);
3512 const equiv_class &rhs = c->m_rhs.get_obj (*m_constraints);
3513 svalue_id lhs_sid = lhs.get_representative ();
3514 svalue_id rhs_sid = rhs.get_representative ();
3515 tree lhs_tree = get_representative_tree (lhs_sid);
3516 tree rhs_tree = get_representative_tree (rhs_sid);
3517 if (lhs_tree && rhs_tree
3518 && !(CONSTANT_CLASS_P (lhs_tree) && CONSTANT_CLASS_P (rhs_tree)))
3519 {
3520 dump_separator (pp, &is_first);
3521 dump_tree (pp, lhs_tree);
3522 pp_printf (pp, " %s ", constraint_op_code (c->m_op));
3523 dump_tree (pp, rhs_tree);
3524 }
3525 }
3526
3527 return;
3528 }
3529
3530 get_region (m_root_rid)->dump_to_pp (*this, m_root_rid, pp, "", true);
3531
3532 pp_string (pp, "svalues:");
3533 pp_newline (pp);
3534 int i;
3535 svalue *svalue;
3536 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
3537 {
3538 pp_string (pp, " ");
3539 svalue_id sid = svalue_id::from_int (i);
3540 print_svalue (sid, pp);
3541 pp_newline (pp);
3542 }
3543
3544 pp_string (pp, "constraint manager:");
3545 pp_newline (pp);
3546 m_constraints->dump_to_pp (pp);
3547 }
3548
3549 /* Dump a multiline representation of this model to FILE. */
3550
3551 void
3552 region_model::dump (FILE *fp, bool summarize) const
3553 {
3554 pretty_printer pp;
3555 pp_format_decoder (&pp) = default_tree_printer;
3556 pp_show_color (&pp) = pp_show_color (global_dc->printer);
3557 pp.buffer->stream = fp;
3558 dump_to_pp (&pp, summarize);
3559 pp_flush (&pp);
3560 }
3561
3562 /* Dump a multiline representation of this model to stderr. */
3563
3564 DEBUG_FUNCTION void
3565 region_model::dump (bool summarize) const
3566 {
3567 dump (stderr, summarize);
3568 }
3569
3570 /* Dump this model fully to stderr (i.e. without summarization). */
3571
3572 DEBUG_FUNCTION void
3573 region_model::debug () const
3574 {
3575 dump (false);
3576 }
3577
3578 /* Dump VEC to PP, in the form "{VEC elements}: LABEL". */
3579
3580 static void
3581 dump_vec_of_tree (pretty_printer *pp,
3582 bool *is_first,
3583 const auto_vec<tree> &vec,
3584 const char *label)
3585 {
3586 if (vec.length () == 0)
3587 return;
3588
3589 dump_separator (pp, is_first);
3590 pp_printf (pp, "{");
3591 unsigned i;
3592 tree key;
3593 FOR_EACH_VEC_ELT (vec, i, key)
3594 {
3595 if (i > 0)
3596 pp_string (pp, ", ");
3597 dump_tree (pp, key);
3598 }
3599 pp_printf (pp, "}: %s", label);
3600 }
3601
3602 /* Dump *MAP_REGION to PP in compact form, updating *IS_FIRST.
3603 Subroutine of region_model::dump_to_pp for use on stack frames and for
3604 the "globals" region. */
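
/* Illustrative output (hypothetical names; exact punctuation comes from
   dump_separator and dump_vec_of_tree):
     p: &q, i: 42, {j, k}: unknown, {m}: uninit
   i.e. pointers and constants are printed individually, whereas keys
   bound to unknown or uninitialized values are grouped. */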
3605
3606 void
3607 region_model::dump_summary_of_map (pretty_printer *pp,
3608 map_region *map_region,
3609 bool *is_first) const
3610 {
3611 /* Get the keys, sorted by tree_cmp. In particular, this ought
3612 to alphabetize any decls. */
3613 auto_vec<tree> keys (map_region->elements ());
3614 for (map_region::iterator_t iter = map_region->begin ();
3615 iter != map_region->end ();
3616 ++iter)
3617 {
3618 tree key_a = (*iter).first;
3619 keys.quick_push (key_a);
3620 }
3621 keys.qsort (tree_cmp);
3622
3623 /* Print pointers, constants, and poisoned values that aren't "uninit";
3624 gather keys for unknown and uninit values. */
3625 unsigned i;
3626 tree key;
3627 auto_vec<tree> unknown_keys;
3628 auto_vec<tree> uninit_keys;
3629 FOR_EACH_VEC_ELT (keys, i, key)
3630 {
3631 region_id child_rid = *map_region->get (key);
3632
3633 region *child_region = get_region (child_rid);
3634 if (!child_region)
3635 continue;
3636 svalue_id sid = child_region->get_value_direct ();
3637 if (sid.null_p ())
3638 continue;
3639 svalue *sval = get_svalue (sid);
3640 switch (sval->get_kind ())
3641 {
3642 default:
3643 gcc_unreachable ();
3644 case SK_REGION:
3645 {
3646 region_svalue *region_sval = as_a <region_svalue *> (sval);
3647 region_id pointee_rid = region_sval->get_pointee ();
3648 tree pointee = get_representative_path_var (pointee_rid).m_tree;
3649 dump_separator (pp, is_first);
3650 dump_tree (pp, key);
3651 pp_string (pp, ": ");
3652 if (pointee)
3653 {
3654 pp_character (pp, '&');
3655 dump_tree (pp, pointee);
3656 }
3657 else
3658 pp_string (pp, "NULL");
3659 }
3660 break;
3661 case SK_CONSTANT:
3662 dump_separator (pp, is_first);
3663 dump_tree (pp, key);
3664 pp_string (pp, ": ");
3665 dump_tree (pp, sval->dyn_cast_constant_svalue ()->get_constant ());
3666 break;
3667 case SK_UNKNOWN:
3668 unknown_keys.safe_push (key);
3669 break;
3670 case SK_POISONED:
3671 {
3672 poisoned_svalue *poisoned_sval = as_a <poisoned_svalue *> (sval);
3673 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3674 if (pkind == POISON_KIND_UNINIT)
3675 uninit_keys.safe_push (key);
3676 else
3677 {
3678 dump_separator (pp, is_first);
3679 dump_tree (pp, key);
3680 pp_printf (pp, ": %s", poison_kind_to_str (pkind));
3681 }
3682 }
3683 break;
3684 case SK_SETJMP:
3685 dump_separator (pp, is_first);
3686 pp_printf (pp, "setjmp: EN: %i",
3687 sval->dyn_cast_setjmp_svalue ()->get_enode_index ());
3688 break;
3689 }
3690 }
3691
3692 /* Print unknown and uninitialized values in consolidated form. */
3693 dump_vec_of_tree (pp, is_first, unknown_keys, "unknown");
3694 dump_vec_of_tree (pp, is_first, uninit_keys, "uninit");
3695 }
3696
3697 /* Assert that this object is valid. */
3698
3699 void
3700 region_model::validate () const
3701 {
3702 /* Skip this in a release build. */
3703 #if !CHECKING_P
3704 return;
3705 #endif
3706
3707 m_constraints->validate ();
3708
3709 unsigned i;
3710 region *r;
3711 FOR_EACH_VEC_ELT (m_regions, i, r)
3712 r->validate (this);
3713
3714 // TODO: anything else?
3715
3716 /* Verify that the stack region (if any) has an "uninitialized" value. */
3717 region *stack_region = get_root_region ()->get_stack_region (this);
3718 if (stack_region)
3719 {
3720 svalue_id stack_value_sid = stack_region->get_value_direct ();
3721 svalue *stack_value = get_svalue (stack_value_sid);
3722 gcc_assert (stack_value->get_kind () == SK_POISONED);
3723 poisoned_svalue *subclass = stack_value->dyn_cast_poisoned_svalue ();
3724 gcc_assert (subclass);
3725 gcc_assert (subclass->get_poison_kind () == POISON_KIND_UNINIT);
3726 }
3727 }
3728
3729 /* Global data for use by svalue_id_cmp_by_constant_svalue. */
3730
3731 static region_model *svalue_id_cmp_by_constant_svalue_model = NULL;
3732
3733 /* Comparator for use by region_model::canonicalize. */
3734
3735 static int
3736 svalue_id_cmp_by_constant_svalue (const void *p1, const void *p2)
3737 {
3738 const svalue_id *sid1 = (const svalue_id *)p1;
3739 const svalue_id *sid2 = (const svalue_id *)p2;
3740 gcc_assert (!sid1->null_p ());
3741 gcc_assert (!sid2->null_p ());
3742 gcc_assert (svalue_id_cmp_by_constant_svalue_model);
3743 const svalue &sval1
3744 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid1);
3745 const svalue &sval2
3746 = *svalue_id_cmp_by_constant_svalue_model->get_svalue (*sid2);
3747 gcc_assert (sval1.get_kind () == SK_CONSTANT);
3748 gcc_assert (sval2.get_kind () == SK_CONSTANT);
3749
3750 tree cst1 = ((const constant_svalue &)sval1).get_constant ();
3751 tree cst2 = ((const constant_svalue &)sval2).get_constant ();
3752 return tree_cmp (cst1, cst2);
3753 }
3754
3755 /* Reorder the regions and svalues into a deterministic "canonical" order,
3756 to maximize the chance that equivalent models compare equal.
3757 If non-NULL, notify CTXT about the svalue id remapping. */
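
/* Illustrative sketch: two models that created the constants 0 and 1 in
   opposite orders will, after canonicalization, number those constants
   consistently (constants are walked first, in tree_cmp order), so that
   operator== and hash () stand a chance of treating the models as
   equal. */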
3758
3759 void
3760 region_model::canonicalize (region_model_context *ctxt)
3761 {
3762 /* Walk all regions and values in a deterministic order, visiting
3763 rids and sids, generating a rid and sid map. */
3764 canonicalization c (*this);
3765
3766 /* (1): Walk all svalues, putting constants first, sorting the constants
3767 (thus imposing an ordering on any constants that are purely referenced
3768 by constraints).
3769 Ignore other svalues for now. */
3770 {
3771 unsigned i;
3772 auto_vec<svalue_id> sids;
3773 svalue *sval;
3774 FOR_EACH_VEC_ELT (m_svalues, i, sval)
3775 {
3776 if (sval->get_kind () == SK_CONSTANT)
3777 sids.safe_push (svalue_id::from_int (i));
3778 }
3779 svalue_id_cmp_by_constant_svalue_model = this;
3780 sids.qsort (svalue_id_cmp_by_constant_svalue);
3781 svalue_id_cmp_by_constant_svalue_model = NULL;
3782 svalue_id *sid;
3783 FOR_EACH_VEC_ELT (sids, i, sid)
3784 c.walk_sid (*sid);
3785 }
3786
3787 /* (2): Walk all regions (and thus their values) in a deterministic
3788 order. */
3789 c.walk_rid (m_root_rid);
3790
3791 /* (3): Ensure we've visited everything, as we don't want to purge
3792 at this stage. Anything we visit for the first time here has
3793 arbitrary order. */
3794 {
3795 unsigned i;
3796 region *region;
3797 FOR_EACH_VEC_ELT (m_regions, i, region)
3798 c.walk_rid (region_id::from_int (i));
3799 svalue *sval;
3800 FOR_EACH_VEC_ELT (m_svalues, i, sval)
3801 c.walk_sid (svalue_id::from_int (i));
3802 }
3803
3804 /* (4): We now have a reordering of the regions and values.
3805 Apply it. */
3806 remap_svalue_ids (c.m_sid_map);
3807 remap_region_ids (c.m_rid_map);
3808 if (ctxt)
3809 ctxt->remap_svalue_ids (c.m_sid_map);
3810
3811 /* (5): Canonicalize the constraint_manager (it has already had its
3812 svalue_ids remapped above). This makes use of the new svalue_id
3813 values, and so must happen last. */
3814 m_constraints->canonicalize (get_num_svalues ());
3815
3816 validate ();
3817 }
3818
3819 /* Return true if this region_model is in canonical form. */
3820
3821 bool
3822 region_model::canonicalized_p () const
3823 {
3824 region_model copy (*this);
3825 copy.canonicalize (NULL);
3826 return *this == copy;
3827 }
3828
3829 /* A subclass of pending_diagnostic for complaining about uses of
3830 poisoned values. */
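
/* For instance (illustrative), for POISON_KIND_FREED the warning text
   takes the form "use after 'free' of 'ptr'" (with CWE-416 metadata),
   and the final event reads "use after 'free' of 'ptr' here", per the
   emit and describe_final_event implementations below. */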
3831
3832 class poisoned_value_diagnostic
3833 : public pending_diagnostic_subclass<poisoned_value_diagnostic>
3834 {
3835 public:
3836 poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
3837 : m_expr (expr), m_pkind (pkind)
3838 {}
3839
3840 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }
3841
3842 bool operator== (const poisoned_value_diagnostic &other) const
3843 {
3844 return m_expr == other.m_expr;
3845 }
3846
3847 bool emit (rich_location *rich_loc) FINAL OVERRIDE
3848 {
3849 switch (m_pkind)
3850 {
3851 default:
3852 gcc_unreachable ();
3853 case POISON_KIND_UNINIT:
3854 {
3855 diagnostic_metadata m;
3856 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
3857 return warning_meta (rich_loc, m,
3858 OPT_Wanalyzer_use_of_uninitialized_value,
3859 "use of uninitialized value %qE",
3860 m_expr);
3861 }
3862 break;
3863 case POISON_KIND_FREED:
3864 {
3865 diagnostic_metadata m;
3866 m.add_cwe (416); /* "CWE-416: Use After Free". */
3867 return warning_meta (rich_loc, m,
3868 OPT_Wanalyzer_use_after_free,
3869 "use after %<free%> of %qE",
3870 m_expr);
3871 }
3872 break;
3873 case POISON_KIND_POPPED_STACK:
3874 {
3875 /* TODO: which CWE? */
3876 return warning_at (rich_loc,
3877 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
3878 "use of pointer %qE within stale stack frame",
3879 m_expr);
3880 }
3881 break;
3882 }
3883 }
3884
3885 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
3886 {
3887 switch (m_pkind)
3888 {
3889 default:
3890 gcc_unreachable ();
3891 case POISON_KIND_UNINIT:
3892 return ev.formatted_print ("use of uninitialized value %qE here",
3893 m_expr);
3894 case POISON_KIND_FREED:
3895 return ev.formatted_print ("use after %<free%> of %qE here",
3896 m_expr);
3897 case POISON_KIND_POPPED_STACK:
3898 return ev.formatted_print
3899 ("use of pointer %qE within stale stack frame here",
3900 m_expr);
3901 }
3902 }
3903
3904 private:
3905 tree m_expr;
3906 enum poison_kind m_pkind;
3907 };
3908
3909 /* Determine if EXPR is poisoned, and if so, queue a diagnostic to CTXT. */
3910
3911 void
3912 region_model::check_for_poison (tree expr, region_model_context *ctxt)
3913 {
3914 if (!ctxt)
3915 return;
3916
3917 // TODO: this is disabled for now (too many false positives)
3918 return;
3919
3920 svalue_id expr_sid = get_rvalue (expr, ctxt);
3921 gcc_assert (!expr_sid.null_p ());
3922 svalue *expr_svalue = get_svalue (expr_sid);
3923 gcc_assert (expr_svalue);
3924 if (const poisoned_svalue *poisoned_sval
3925 = expr_svalue->dyn_cast_poisoned_svalue ())
3926 {
3927 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
3928 ctxt->warn (new poisoned_value_diagnostic (expr, pkind));
3929 }
3930 }
3931
3932 /* Update this model for the ASSIGN stmt, using CTXT to report any
3933 diagnostics. */
3934
3935 void
3936 region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
3937 {
3938 tree lhs = gimple_assign_lhs (assign);
3939 tree rhs1 = gimple_assign_rhs1 (assign);
3940
3941 region_id lhs_rid = get_lvalue (lhs, ctxt);
3942
3943 /* Check for uses of poisoned values. */
3944 switch (get_gimple_rhs_class (gimple_expr_code (assign)))
3945 {
3946 case GIMPLE_INVALID_RHS:
3947 gcc_unreachable ();
3948 break;
3949 case GIMPLE_TERNARY_RHS:
3950 check_for_poison (gimple_assign_rhs3 (assign), ctxt);
3951 /* Fallthru */
3952 case GIMPLE_BINARY_RHS:
3953 check_for_poison (gimple_assign_rhs2 (assign), ctxt);
3954 /* Fallthru */
3955 case GIMPLE_UNARY_RHS:
3956 case GIMPLE_SINGLE_RHS:
3957 check_for_poison (gimple_assign_rhs1 (assign), ctxt);
3958 }
3959
3960 if (lhs_rid.null_p ())
3961 return;
3962 // TODO: issue a warning for this case
3963
3964 enum tree_code op = gimple_assign_rhs_code (assign);
3965 switch (op)
3966 {
3967 default:
3968 {
3969 if (0)
3970 sorry_at (assign->location, "unhandled assignment op: %qs",
3971 get_tree_code_name (op));
3972 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
3973 }
3974 break;
3975
3976 case BIT_FIELD_REF:
3977 {
3978 // TODO
3979 }
3980 break;
3981
3982 case CONSTRUCTOR:
3983 {
3984 /* e.g. "x ={v} {CLOBBER};" */
3985 // TODO
3986 }
3987 break;
3988
3989 case POINTER_PLUS_EXPR:
3990 {
3991 /* e.g. "_1 = a_10(D) + 12;" */
3992 tree ptr = rhs1;
3993 tree offset = gimple_assign_rhs2 (assign);
3994
3995 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
3996 svalue_id offset_sid = get_rvalue (offset, ctxt);
3997 region_id element_rid
3998 = get_or_create_pointer_plus_expr (TREE_TYPE (TREE_TYPE (ptr)),
3999 ptr_sid, offset_sid,
4000 ctxt);
4001 svalue_id element_ptr_sid
4002 = get_or_create_ptr_svalue (TREE_TYPE (ptr), element_rid);
4003 set_value (lhs_rid, element_ptr_sid, ctxt);
4004 }
4005 break;
4006
4007 case POINTER_DIFF_EXPR:
4008 {
4009 /* e.g. "_1 = p_2(D) - q_3(D);". */
4010
4011 /* TODO. */
4012
4013 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
4014 }
4015 break;
4016
4017 case ADDR_EXPR:
4018 {
4019 /* LHS = &RHS; */
4020 svalue_id ptr_sid = get_rvalue (rhs1, ctxt);
4021 set_value (lhs_rid, ptr_sid, ctxt);
4022 }
4023 break;
4024
4025 case MEM_REF:
4026 {
4027 region_id rhs_rid = get_lvalue (rhs1, ctxt);
4028 svalue_id rhs_sid
4029 = get_region (rhs_rid)->get_value (*this, true, ctxt);
4030 set_value (lhs_rid, rhs_sid, ctxt);
4031 }
4032 break;
4033
4034 case REAL_CST:
4035 case INTEGER_CST:
4036 case ARRAY_REF:
4037 {
4038 /* LHS = RHS; */
4039 svalue_id cst_sid = get_rvalue (rhs1, ctxt);
4040 set_value (lhs_rid, cst_sid, ctxt);
4041 }
4042 break;
4043
4044 case FIX_TRUNC_EXPR:
4045 case FLOAT_EXPR:
4046 case NOP_EXPR:
4047 // cast: TODO
4048 // fall through for now
4049 case SSA_NAME:
4050 case VAR_DECL:
4051 case PARM_DECL:
4052 {
4053 /* LHS = VAR; */
4054 svalue_id var_sid = get_rvalue (rhs1, ctxt);
4055 set_value (lhs_rid, var_sid, ctxt);
4056 }
4057 break;
4058
4059 case EQ_EXPR:
4060 case GE_EXPR:
4061 case LE_EXPR:
4062 case NE_EXPR:
4063 case GT_EXPR:
4064 case LT_EXPR:
4065 {
4066 tree rhs2 = gimple_assign_rhs2 (assign);
4067
4068 // TODO: constraints between svalues
4069 svalue_id rhs1_sid = get_rvalue (rhs1, ctxt);
4070 svalue_id rhs2_sid = get_rvalue (rhs2, ctxt);
4071
4072 tristate t = eval_condition (rhs1_sid, op, rhs2_sid);
4073 if (t.is_known ())
4074 set_value (lhs_rid,
4075 get_rvalue (t.is_true ()
4076 ? boolean_true_node
4077 : boolean_false_node,
4078 ctxt),
4079 ctxt);
4080 else
4081 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
4082 }
4083 break;
4084
4085 case NEGATE_EXPR:
4086 case BIT_NOT_EXPR:
4087 {
4088 // TODO: unary ops
4089
4090 // TODO: constant?
4091
4092 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
4093 }
4094 break;
4095
4096 case PLUS_EXPR:
4097 case MINUS_EXPR:
4098 case MULT_EXPR:
4099 case TRUNC_DIV_EXPR:
4100 case TRUNC_MOD_EXPR:
4101 case LSHIFT_EXPR:
4102 case RSHIFT_EXPR:
4103 case BIT_IOR_EXPR:
4104 case BIT_XOR_EXPR:
4105 case BIT_AND_EXPR:
4106 case MIN_EXPR:
4107 case MAX_EXPR:
4108 {
4109 /* Binary ops. */
4110 tree rhs2 = gimple_assign_rhs2 (assign);
4111
4112 svalue_id rhs1_sid = get_rvalue (rhs1, ctxt);
4113 svalue_id rhs2_sid = get_rvalue (rhs2, ctxt);
4114
4115 if (tree rhs1_cst = maybe_get_constant (rhs1_sid))
4116 if (tree rhs2_cst = maybe_get_constant (rhs2_sid))
4117 {
4118 tree result = fold_binary (op, TREE_TYPE (lhs),
4119 rhs1_cst, rhs2_cst);
4120 if (result && CONSTANT_CLASS_P (result))
4121 {
4122 svalue_id result_sid
4123 = get_or_create_constant_svalue (result);
4124 set_value (lhs_rid, result_sid, ctxt);
4125 return;
4126 }
4127 }
4128 set_to_new_unknown_value (lhs_rid, TREE_TYPE (lhs), ctxt);
4129 }
4130 break;
4131
4132 case COMPONENT_REF:
4133 {
4134 /* LHS = op0.op1; */
4135 region_id child_rid = get_lvalue (rhs1, ctxt);
4136 svalue_id child_sid
4137 = get_region (child_rid)->get_value (*this, true, ctxt);
4138 set_value (lhs_rid, child_sid, ctxt);
4139 }
4140 break;
4141 }
4142 }
4143
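/* A hypothetical example, for illustration only (identifiers made up):
   C code whose gimplified assignments exercise several of the cases
   handled by on_assignment above.

     int example (int *a, int i)
     {
       int *p = &a[2];     // POINTER_PLUS_EXPR: p = a + 8
       int k = i * 4;      // MULT_EXPR: unknown unless both operands are constants
       int b = (i > 0);    // GT_EXPR, evaluated via eval_condition
       return *p + k + b;  // MEM_REF load of *p, then PLUS_EXPRs
     }
*/
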
4144 /* Update this model for the CALL stmt, using CTXT to report any
4145 diagnostics - the first half.
4146
4147 Updates to the region_model that should be made *before* sm-states
4148 are updated are done here; other updates to the region_model are done
4149 in region_model::on_call_post.
4150
4151 Return true if the function call has unknown side effects (it wasn't
4152 recognized and we don't have a body for it, or are unable to tell which
4153 fndecl it is). */
4154
4155 bool
4156 region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
4157 {
4158 region_id lhs_rid;
4159 tree lhs_type = NULL_TREE;
4160 if (tree lhs = gimple_call_lhs (call))
4161 {
4162 lhs_rid = get_lvalue (lhs, ctxt);
4163 lhs_type = TREE_TYPE (lhs);
4164 }
4165
4166 /* Check for uses of poisoned values.
4167 For now, special-case "free", to avoid warning about "use-after-free"
4168 when "double free" would be more precise. */
4169 if (!is_special_named_call_p (call, "free", 1))
4170 for (unsigned i = 0; i < gimple_call_num_args (call); i++)
4171 check_for_poison (gimple_call_arg (call, i), ctxt);
4172
4173 bool unknown_side_effects = false;
4174
4175 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4176 {
4177 if (is_named_call_p (callee_fndecl, "malloc", call, 1))
4178 {
4179 // TODO: capture size as a svalue?
4180 region_id new_rid = add_new_malloc_region ();
4181 if (!lhs_rid.null_p ())
4182 {
4183 svalue_id ptr_sid
4184 = get_or_create_ptr_svalue (lhs_type, new_rid);
4185 set_value (lhs_rid, ptr_sid, ctxt);
4186 }
4187 return false;
4188 }
4189 else if (is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
4190 {
4191 region_id frame_rid = get_current_frame_id ();
4192 region_id new_rid
4193 = add_region (new symbolic_region (frame_rid, NULL_TREE, false));
4194 if (!lhs_rid.null_p ())
4195 {
4196 svalue_id ptr_sid
4197 = get_or_create_ptr_svalue (lhs_type, new_rid);
4198 set_value (lhs_rid, ptr_sid, ctxt);
4199 }
4200 return false;
4201 }
4202 else if (is_named_call_p (callee_fndecl, "strlen", call, 1))
4203 {
4204 region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt);
4205 svalue_id buf_sid
4206 = get_region (buf_rid)->get_value (*this, true, ctxt);
4207 if (tree cst_expr = maybe_get_constant (buf_sid))
4208 {
4209 if (TREE_CODE (cst_expr) == STRING_CST
4210 && !lhs_rid.null_p ())
4211 {
4212 /* TREE_STRING_LENGTH is sizeof, not strlen. */
4213 int sizeof_cst = TREE_STRING_LENGTH (cst_expr);
4214 int strlen_cst = sizeof_cst - 1;
4215 tree t_cst = build_int_cst (lhs_type, strlen_cst);
4216 svalue_id result_sid
4217 = get_or_create_constant_svalue (t_cst);
4218 set_value (lhs_rid, result_sid, ctxt);
4219 return false;
4220 }
4221 }
4222 /* Otherwise an unknown value. */
4223 }
4224 else if (is_named_call_p (callee_fndecl,
4225 "__analyzer_dump_num_heap_regions", call, 0))
4226 {
4227 /* Handle the builtin "__analyzer_dump_num_heap_regions" by emitting
4228 a warning (for use in DejaGnu tests). */
4229 int num_heap_regions = 0;
4230 region_id heap_rid = get_root_region ()->ensure_heap_region (this);
4231 unsigned i;
4232 region *region;
4233 FOR_EACH_VEC_ELT (m_regions, i, region)
4234 if (region->get_parent () == heap_rid)
4235 num_heap_regions++;
4236 /* Use quotes to ensure the output isn't truncated. */
4237 warning_at (call->location, 0,
4238 "num heap regions: %qi", num_heap_regions);
4239 return false;
4240 }
4241 else if (!fndecl_has_gimple_body_p (callee_fndecl)
4242 && !DECL_PURE_P (callee_fndecl))
4243 unknown_side_effects = true;
4244 }
4245 else
4246 unknown_side_effects = true;
4247
4248 /* Unknown return value. */
4249 if (!lhs_rid.null_p ())
4250 set_to_new_unknown_value (lhs_rid, lhs_type, ctxt);
4251
4252 return unknown_side_effects;
4253 }
4254
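/* A hypothetical example, for illustration only (identifiers made up):
   calls exercising the special-casing in on_call_pre above: "malloc"
   binds the LHS to a pointer to a new heap region, "__builtin_alloca"
   to a new region within the current frame, and "strlen" of a string
   literal is folded to a constant.

     #include <stdlib.h>
     #include <string.h>

     size_t example (void)
     {
       char *p = (char *) malloc (16);           // new heap region
       char *q = (char *) __builtin_alloca (8);  // new region in the current frame
       size_t n = strlen ("abc");                // folded to the constant 3
       q[0] = 'x';
       free (p);
       return n;
     }
*/
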
4255 /* Update this model for the CALL stmt, using CTXT to report any
4256 diagnostics - the second half.
4257
4258 Updates to the region_model that should be made *after* sm-states
4259 are updated are done here; other updates to the region_model are done
4260 in region_model::on_call_pre.
4261
4262 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
4263 to purge state. */
4264
4265 void
4266 region_model::on_call_post (const gcall *call,
4267 bool unknown_side_effects,
4268 region_model_context *ctxt)
4269 {
4270 /* Update for "free" here, after sm-handling.
4271
4272 If the ptr points to an underlying heap region, delete the region,
4273 poisoning pointers to it and regions within it.
4274
4275 We delay this until after sm-state has been updated so that the
4276 sm-handling can transition all of the various casts of the pointer
4277 to a "freed" state *before* we delete the related region here.
4278
4279 This has to be done here so that the sm-handling can use the fact
4280 that they point to the same region to establish that they are equal
4281 (in region_model::eval_condition_without_cm), and thus transition
4282 all pointers to the region to the "freed" state together, regardless
4283 of casts. */
4284 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4285 if (is_named_call_p (callee_fndecl, "free", call, 1))
4286 {
4287 tree ptr = gimple_call_arg (call, 0);
4288 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
4289 svalue *ptr_sval = get_svalue (ptr_sid);
4290 if (region_svalue *ptr_to_region_sval
4291 = ptr_sval->dyn_cast_region_svalue ())
4292 {
4293 /* If the ptr points to an underlying heap region, delete it,
4294 poisoning pointers. */
4295 region_id pointee_rid = ptr_to_region_sval->get_pointee ();
4296 region_id heap_rid = get_root_region ()->ensure_heap_region (this);
4297 if (!pointee_rid.null_p ()
4298 && get_region (pointee_rid)->get_parent () == heap_rid)
4299 {
4300 purge_stats stats;
4301 delete_region_and_descendents (pointee_rid,
4302 POISON_KIND_FREED,
4303 &stats, ctxt->get_logger ());
4304 purge_unused_svalues (&stats, ctxt);
4305 validate ();
4306 // TODO: do anything with stats?
4307 }
4308 }
4309 return;
4310 }
4311
4312 if (unknown_side_effects)
4313 handle_unrecognized_call (call, ctxt);
4314 }
4315
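/* A hypothetical example, for illustration only (identifiers made up):
   why on_call_post deletes the heap region for "free" only after
   sm-state has been updated.  Below, "p" and "q" are differently-typed
   pointers to the same heap region; both should transition to the
   "freed" state together before the region itself is purged.

     #include <stdlib.h>

     void example (void)
     {
       void *p = malloc (16);
       char *q = (char *) p;
       free (q);   // afterwards both p and q point to a freed region
     }
*/
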
4316 /* Helper class for region_model::handle_unrecognized_call, for keeping
4317 track of all regions that are reachable, and, of those, which are
4318 mutable. */
4319
4320 class reachable_regions
4321 {
4322 public:
4323 reachable_regions (region_model *model)
4324 : m_model (model), m_reachable_rids (), m_mutable_rids ()
4325 {}
4326
4327 /* Lazily mark RID as being reachable, recursively adding regions
4328 reachable from RID. */
4329 void add (region_id rid, bool is_mutable)
4330 {
4331 gcc_assert (!rid.null_p ());
4332
4333 unsigned idx = rid.as_int ();
4334 /* Bail out if this region is already in the sets at the IS_MUTABLE
4335 level of mutability. */
4336 if (!is_mutable && bitmap_bit_p (m_reachable_rids, idx))
4337 return;
4338 bitmap_set_bit (m_reachable_rids, idx);
4339
4340 if (is_mutable)
4341 {
4342 if (bitmap_bit_p (m_mutable_rids, idx))
4343 return;
4344 else
4345 bitmap_set_bit (m_mutable_rids, idx);
4346 }
4347
4348 /* If this region's value is a pointer, add the pointee. */
4349 region *reg = m_model->get_region (rid);
4350 svalue_id sid = reg->get_value_direct ();
4351 svalue *sval = m_model->get_svalue (sid);
4352 if (sval)
4353 if (region_svalue *ptr = sval->dyn_cast_region_svalue ())
4354 {
4355 region_id pointee_rid = ptr->get_pointee ();
4356 /* Use const-ness of pointer type to affect mutability. */
4357 bool ptr_is_mutable = true;
4358 if (ptr->get_type ()
4359 && TREE_CODE (ptr->get_type ()) == POINTER_TYPE
4360 && TYPE_READONLY (TREE_TYPE (ptr->get_type ())))
4361 ptr_is_mutable = false;
4362 add (pointee_rid, ptr_is_mutable);
4363 }
4364
4365 /* Add descendents of this region. */
4366 region_id_set descendents (m_model);
4367 m_model->get_descendents (rid, &descendents, region_id::null ());
4368 for (unsigned i = 0; i < m_model->get_num_regions (); i++)
4369 {
4370 region_id iter_rid = region_id::from_int (i);
4371 if (descendents.region_p (iter_rid))
4372 add (iter_rid, is_mutable);
4373 }
4374 }
4375
4376 bool mutable_p (region_id rid)
4377 {
4378 gcc_assert (!rid.null_p ());
4379 return bitmap_bit_p (m_mutable_rids, rid.as_int ());
4380 }
4381
4382 private:
4383 region_model *m_model;
4384
4385 /* The region ids already seen. This has to be an auto_bitmap rather than
4386 an auto_sbitmap as new regions can be created within the model during
4387 the traversal. */
4388 auto_bitmap m_reachable_rids;
4389
4390 /* The region_ids that can be changed (accessed via non-const pointers). */
4391 auto_bitmap m_mutable_rids;
4392 };
4393
4394 /* Handle a call CALL to a function with unknown behavior.
4395
4396 Traverse the regions in this model, determining what regions are
4397 reachable from pointer arguments to CALL and from global variables,
4398 recursively.
4399
4400 Set all reachable regions to new unknown values and purge sm-state
4401 from their values, and from values that point to them. */
4402
4403 void
4404 region_model::handle_unrecognized_call (const gcall *call,
4405 region_model_context *ctxt)
4406 {
4407 tree fndecl = get_fndecl_for_call (call, ctxt);
4408
4409 reachable_regions reachable_regions (this);
4410
4411 /* Determine the reachable regions and their mutability. */
4412 {
4413 /* Globals. */
4414 region_id globals_rid = get_globals_region_id ();
4415 if (!globals_rid.null_p ())
4416 reachable_regions.add (globals_rid, true);
4417
4418 /* Params that are pointers. */
4419 tree iter_param_types = NULL_TREE;
4420 if (fndecl)
4421 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4422 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
4423 {
4424 /* Track expected param type, where available. */
4425 tree param_type = NULL_TREE;
4426 if (iter_param_types)
4427 {
4428 param_type = TREE_VALUE (iter_param_types);
4429 gcc_assert (param_type);
4430 iter_param_types = TREE_CHAIN (iter_param_types);
4431 }
4432
4433 tree parm = gimple_call_arg (call, arg_idx);
4434 svalue_id parm_sid = get_rvalue (parm, NULL);
4435 svalue *parm_sval = get_svalue (parm_sid);
4436 if (parm_sval)
4437 if (region_svalue *parm_ptr = parm_sval->dyn_cast_region_svalue ())
4438 {
4439 region_id pointee_rid = parm_ptr->get_pointee ();
4440 bool is_mutable = true;
4441 if (param_type
4442 && TREE_CODE (param_type) == POINTER_TYPE
4443 && TYPE_READONLY (TREE_TYPE (param_type)))
4444 is_mutable = false;
4445 reachable_regions.add (pointee_rid, is_mutable);
4446 }
4447 // FIXME: what about compound parms that contain ptrs?
4448 }
4449 }
4450
4451 /* OK: we now have all reachable regions.
4452 Set them all to new unknown values. */
4453 for (unsigned i = 0; i < get_num_regions (); i++)
4454 {
4455 region_id iter_rid = region_id::from_int (i);
4456 if (reachable_regions.mutable_p (iter_rid))
4457 {
4458 region *reg = get_region (iter_rid);
4459
4460 /* Purge any sm-state for any underlying svalue. */
4461 svalue_id curr_sid = reg->get_value_direct ();
4462 if (!curr_sid.null_p ())
4463 ctxt->on_unknown_change (curr_sid);
4464
4465 set_to_new_unknown_value (iter_rid,
4466 reg->get_type (),
4467 ctxt);
4468 }
4469 }
4470
4471 /* Purge sm-state for any remaining svalues that point to regions that
4472 were reachable. This helps suppress leak false-positives.
4473
4474 For example, if we had a malloc call that was cast to a "foo *" type,
4475 we could have a temporary void * for the result of malloc which has its
4476 own svalue, not reachable from the function call, but for which the
4477 "foo *" svalue was reachable. If we don't purge it, the temporary will
4478 be reported as a leak. */
4479 int i;
4480 svalue *svalue;
4481 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
4482 if (region_svalue *ptr = svalue->dyn_cast_region_svalue ())
4483 {
4484 region_id pointee_rid = ptr->get_pointee ();
4485 if (reachable_regions.mutable_p (pointee_rid))
4486 ctxt->on_unknown_change (svalue_id::from_int (i));
4487 }
4488
4489 validate ();
4490 }
4491
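/* A hypothetical example, for illustration only ("mystery", "g", "x"
   and "y" are made up): a call with unknown behavior, as handled by
   handle_unrecognized_call above.  With no body for "mystery"
   available, the analyzer must assume it can write through "out" and
   to globals, but not through the const-qualified "in".

     extern void mystery (int *out, const int *in);

     int g;

     int example (void)
     {
       int x = 1, y = 2;
       mystery (&x, &y);
       return x + y + g;  // x and g become unknown; y keeps its value
     }
*/
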
4492 /* Update this model for the RETURN_STMT, using CTXT to report any
4493 diagnostics. */
4494
4495 void
4496 region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
4497 {
4498 tree callee = get_current_function ()->decl;
4499 tree lhs = DECL_RESULT (callee);
4500 tree rhs = gimple_return_retval (return_stmt);
4501
4502 if (lhs && rhs)
4503 set_value (get_lvalue (lhs, ctxt), get_rvalue (rhs, ctxt), ctxt);
4504 }
4505
4506 /* Update this model for a call and return of setjmp/sigsetjmp at CALL within
4507 ENODE, using CTXT to report any diagnostics.
4508
4509 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
4510 0), as opposed to any second return due to longjmp/siglongjmp. */
4511
4512 void
4513 region_model::on_setjmp (const gcall *call, const exploded_node *enode,
4514 region_model_context *ctxt)
4515 {
4516 region_id buf_rid = deref_rvalue (gimple_call_arg (call, 0), ctxt);
4517 region *buf = get_region (buf_rid);
4518
4519 /* Create a setjmp_svalue for this call and store it in BUF_RID's region. */
4520 if (buf)
4521 {
4522 setjmp_record r (enode, call);
4523 svalue *sval = new setjmp_svalue (r, buf->get_type ());
4524 svalue_id new_sid = add_svalue (sval);
4525 set_value (buf_rid, new_sid, ctxt);
4526 }
4527
4528 /* Direct calls to setjmp return 0. */
4529 if (tree lhs = gimple_call_lhs (call))
4530 {
4531 tree zero = build_int_cst (TREE_TYPE (lhs), 0);
4532 svalue_id new_sid = get_or_create_constant_svalue (zero);
4533 region_id lhs_rid = get_lvalue (lhs, ctxt);
4534 set_value (lhs_rid, new_sid, ctxt);
4535 }
4536 }
4537
4538 /* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
4539 to a "setjmp" at SETJMP_CALL where the final stack depth should be
4540 SETJMP_STACK_DEPTH. Purge any stack frames, potentially reporting on
4541 leaks to CTXT. */
4542
4543 void
4544 region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
4545 int setjmp_stack_depth,
4546 region_model_context *ctxt)
4547 {
4548 /* Evaluate the val, using the frame of the "longjmp". */
4549 tree fake_retval = gimple_call_arg (longjmp_call, 1);
4550 svalue_id fake_retval_sid = get_rvalue (fake_retval, ctxt);
4551
4552 /* Pop any frames until we reach the stack depth of the function where
4553 setjmp was called. */
4554 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
4555 while (get_stack_depth () > setjmp_stack_depth)
4556 {
4557 /* Don't purge unused svalues yet, as we're using fake_retval_sid. */
4558 pop_frame (false, NULL, ctxt);
4559 }
4560
4561 gcc_assert (get_stack_depth () == setjmp_stack_depth);
4562
4563 /* Assign to the LHS of the "setjmp" call. */
4564 if (tree lhs = gimple_call_lhs (setjmp_call))
4565 {
4566 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
4567 tree t_zero = build_int_cst (TREE_TYPE (fake_retval), 0);
4568 svalue_id zero_sid = get_or_create_constant_svalue (t_zero);
4569 tristate eq_zero = eval_condition (fake_retval_sid, EQ_EXPR, zero_sid);
4570 /* If we have 0, use 1. */
4571 if (eq_zero.is_true ())
4572 {
4573 tree t_one = build_int_cst (TREE_TYPE (fake_retval), 1);
4574 svalue_id one_sid = get_or_create_constant_svalue (t_one);
4575 fake_retval_sid = one_sid;
4576 }
4577 else
4578 {
4579 /* Otherwise note that the value is nonzero. */
4580 m_constraints->add_constraint (fake_retval_sid, NE_EXPR, zero_sid);
4581 }
4582
4583 region_id lhs_rid = get_lvalue (lhs, ctxt);
4584 set_value (lhs_rid, fake_retval_sid, ctxt);
4585 }
4586
4587 /* Now that we've assigned the fake_retval, we can purge the unused
4588 svalues, which could detect leaks. */
4589 purge_unused_svalues (NULL, ctxt, NULL);
4590 validate ();
4591 }
4592
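/* A hypothetical example, for illustration only (identifiers made up):
   setjmp/longjmp handling as modeled by on_setjmp and on_longjmp
   above.  The direct call to setjmp yields 0; rewinding via
   longjmp (env, 0) makes setjmp appear to return 1.

     #include <setjmp.h>

     static jmp_buf env;

     static void inner (void)
     {
       longjmp (env, 0);  // a value of 0 is replaced by 1 at the setjmp
     }

     int example (void)
     {
       int i = setjmp (env);  // 0 on the direct call, 1 after the longjmp
       if (i == 0)
         inner ();
       return i;
     }
*/
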
4593 /* Update this region_model for a phi stmt of the form
4594 LHS = PHI <...RHS...>.
4595 where RHS is for the appropriate edge. */
4596
4597 void
4598 region_model::handle_phi (const gphi *phi,
4599 tree lhs, tree rhs, bool is_back_edge,
4600 region_model_context *ctxt)
4601 {
4602 /* For now, don't bother tracking the .MEM SSA names. */
4603 if (tree var = SSA_NAME_VAR (lhs))
4604 if (TREE_CODE (var) == VAR_DECL)
4605 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
4606 return;
4607
4608 svalue_id rhs_sid = get_rvalue (rhs, ctxt);
4609
4610 if (is_back_edge && get_svalue (rhs_sid)->get_kind () != SK_UNKNOWN)
4611 {
4612 /* If we have a back edge, we probably have a loop.
4613 Use an unknown value, to avoid effectively unrolling the
4614 loop.
4615 To terminate, we need to avoid generating a series of
4616 models with an unbounded monotonically increasing number of
4617 redundant unknown values; hence we need to purge svalues
4618 before inserting the state into the exploded graph, to
4619 collect unused svalues. */
4620 set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt);
4621 }
4622 else
4623 set_value (get_lvalue (lhs, ctxt), rhs_sid, ctxt);
4624
4625 if (ctxt)
4626 ctxt->on_phi (phi, rhs);
4627 }
4628
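/* A hypothetical example, for illustration only (identifiers made up):
   a loop giving rise to a phi node, as handled by handle_phi above.
   The phi for "i" at the loop header merges the initial value 0 with
   the value flowing in along the back edge; on the back edge an
   unknown value is used to avoid effectively unrolling the loop.

     int example (int n)
     {
       int sum = 0;
       for (int i = 0; i < n; i++)  // roughly: i = PHI <0(preheader), i'(latch)>
         sum += i;
       return sum;
     }
*/
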
4629 /* Implementation of region_model::get_lvalue; the latter adds type-checking.
4630
4631 Get the id of the region for PV within this region_model,
4632 emitting any diagnostics to CTXT. */
4633
4634 region_id
4635 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt)
4636 {
4637 tree expr = pv.m_tree;
4638
4639 gcc_assert (expr);
4640
4641 switch (TREE_CODE (expr))
4642 {
4643 default:
4644 {
4645 /* If we see a tree code we don't know how to handle, rather than
4646 ICE or generate bogus results, create a dummy region, and notify
4647 CTXT so that it can mark the new state as being not properly
4648 modelled. The exploded graph can then stop exploring that path,
4649 since any diagnostics we might issue will have questionable
4650 validity. */
4651 region_id new_rid
4652 = add_region (new symbolic_region (m_root_rid, NULL_TREE, false));
4653 ctxt->on_unknown_tree_code (pv, dump_location_t ());
4654 return new_rid;
4655 }
4656 break;
4657
4658 case ARRAY_REF:
4659 {
4660 tree array = TREE_OPERAND (expr, 0);
4661 tree index = TREE_OPERAND (expr, 1);
4662 #if 0
4663 // TODO: operands 2 and 3, if present:
4664 gcc_assert (TREE_OPERAND (expr, 2) == NULL_TREE);
4665 gcc_assert (TREE_OPERAND (expr, 3) == NULL_TREE);
4666 #endif
4667
4668 region_id array_rid = get_lvalue (array, ctxt);
4669 svalue_id index_sid = get_rvalue (index, ctxt);
4670 array_region *array_reg = get_region<array_region> (array_rid);
4671 return array_reg->get_element (this, array_rid, index_sid, ctxt);
4672 }
4673 break;
4674
4675 case BIT_FIELD_REF:
4676 {
4677 /* For now, create a view, as if a cast, ignoring the bit positions. */
4678 tree obj = TREE_OPERAND (expr, 0);
4679 return get_or_create_view (get_lvalue (obj, ctxt), TREE_TYPE (expr));
4680 }
4681 break;
4682
4683 case MEM_REF:
4684 {
4685 tree ptr = TREE_OPERAND (expr, 0);
4686 tree offset = TREE_OPERAND (expr, 1);
4687 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
4688 svalue_id offset_sid = get_rvalue (offset, ctxt);
4689 return get_or_create_mem_ref (TREE_TYPE (expr), ptr_sid,
4690 offset_sid, ctxt);
4691 }
4692 break;
4693
4694 case VAR_DECL:
4695 /* Handle globals. */
4696 if (is_global_var (expr))
4697 {
4698 region_id globals_rid
4699 = get_root_region ()->ensure_globals_region (this);
4700 map_region *globals = get_region<map_region> (globals_rid);
4701 region_id var_rid = globals->get_or_create (this, globals_rid, expr,
4702 TREE_TYPE (expr));
4703 return var_rid;
4704 }
4705
4706 /* Fall through. */
4707
4708 case SSA_NAME:
4709 case PARM_DECL:
4710 case RESULT_DECL:
4711 {
4712 gcc_assert (TREE_CODE (expr) == SSA_NAME
4713 || TREE_CODE (expr) == PARM_DECL
4714 || TREE_CODE (expr) == VAR_DECL
4715 || TREE_CODE (expr) == RESULT_DECL);
4716
4717 int stack_depth = pv.m_stack_depth;
4718 stack_region *stack = get_root_region ()->get_stack_region (this);
4719 gcc_assert (stack);
4720 region_id frame_rid = stack->get_frame_rid (stack_depth);
4721 frame_region *frame = get_region <frame_region> (frame_rid);
4722 gcc_assert (frame);
4723 region_id child_rid = frame->get_or_create (this, frame_rid, expr,
4724 TREE_TYPE (expr));
4725 return child_rid;
4726 }
4727
4728 case COMPONENT_REF:
4729 {
4730 /* obj.field */
4731 tree obj = TREE_OPERAND (expr, 0);
4732 tree field = TREE_OPERAND (expr, 1);
4733 region_id obj_rid = get_lvalue (obj, ctxt);
4734 region_id struct_or_union_rid
4735 = get_or_create_view (obj_rid, TREE_TYPE (obj));
4736 return get_field_region (struct_or_union_rid, field);
4737 }
4738 break;
4739
4740 case CONST_DECL:
4741 {
4742 tree cst_type = TREE_TYPE (expr);
4743 region_id cst_rid = add_region_for_type (m_root_rid, cst_type);
4744 if (tree value = DECL_INITIAL (expr))
4745 {
4746 svalue_id sid = get_rvalue (value, ctxt);
4747 get_region (cst_rid)->set_value (*this, cst_rid, sid, ctxt);
4748 }
4749 return cst_rid;
4750 }
4751 break;
4752
4753 case STRING_CST:
4754 {
4755 tree cst_type = TREE_TYPE (expr);
4756 array_region *cst_region = new array_region (m_root_rid, cst_type);
4757 region_id cst_rid = add_region (cst_region);
4758 svalue_id cst_sid = get_or_create_constant_svalue (expr);
4759 cst_region->set_value (*this, cst_rid, cst_sid, ctxt);
4760 return cst_rid;
4761 }
4762 break;
4763
4764 case VIEW_CONVERT_EXPR:
4765 {
4766 tree obj = TREE_OPERAND (expr, 0);
4767 return get_or_create_view (get_lvalue (obj, ctxt), TREE_TYPE (expr));
4768 }
4769 break;
4770 }
4771 }
4772
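/* A hypothetical example, for illustration only (identifiers made up):
   lvalues corresponding to several of the tree codes handled by
   get_lvalue_1 above.

     struct coord { int x; int y; };

     int g;       // global VAR_DECL
     int arr[4];  // accesses via ARRAY_REF

     void example (struct coord *c, int i)
     {
       c->x = 1;    // MEM_REF of "c", plus COMPONENT_REF for the field
       arr[i] = 2;  // ARRAY_REF with a non-constant index
       g = 3;       // global VAR_DECL within the globals region
     }
*/
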
4773 /* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
4774
4775 static void
4776 assert_compat_types (tree src_type, tree dst_type)
4777 {
4778 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
4779 gcc_checking_assert (useless_type_conversion_p (src_type, dst_type));
4780 }
4781
4782 /* Get the id of the region for PV within this region_model,
4783 emitting any diagnostics to CTXT. */
4784
4785 region_id
4786 region_model::get_lvalue (path_var pv, region_model_context *ctxt)
4787 {
4788 if (pv.m_tree == NULL_TREE)
4789 return region_id::null ();
4790
4791 region_id result_rid = get_lvalue_1 (pv, ctxt);
4792 assert_compat_types (get_region (result_rid)->get_type (),
4793 TREE_TYPE (pv.m_tree));
4794 return result_rid;
4795 }
4796
4797 /* Get the region_id for EXPR within this region_model (assuming the most
4798 recent stack frame if it's a local). */
4799
4800 region_id
4801 region_model::get_lvalue (tree expr, region_model_context *ctxt)
4802 {
4803 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
4804 }
4805
4806 /* Implementation of region_model::get_rvalue; the latter adds type-checking.
4807
4808 Get the value of PV within this region_model,
4809 emitting any diagnostics to CTXT. */
4810
4811 svalue_id
4812 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt)
4813 {
4814 gcc_assert (pv.m_tree);
4815
4816 switch (TREE_CODE (pv.m_tree))
4817 {
4818 default:
4819 {
4820 svalue *unknown_sval = new unknown_svalue (TREE_TYPE (pv.m_tree));
4821 return add_svalue (unknown_sval);
4822 }
4823 break;
4824
4825 case ADDR_EXPR:
4826 {
4827 /* "&EXPR". */
4828 tree expr = pv.m_tree;
4829 tree op0 = TREE_OPERAND (expr, 0);
4830 if (TREE_CODE (op0) == FUNCTION_DECL)
4831 return get_svalue_for_fndecl (TREE_TYPE (expr), op0);
4832 else if (TREE_CODE (op0) == LABEL_DECL)
4833 return get_svalue_for_label (TREE_TYPE (expr), op0);
4834 region_id expr_rid = get_lvalue (op0, ctxt);
4835 return get_or_create_ptr_svalue (TREE_TYPE (expr), expr_rid);
4836 }
4837 break;
4838
4839 case ARRAY_REF:
4840 {
4841 region_id element_rid = get_lvalue (pv, ctxt);
4842 return get_region (element_rid)->get_value (*this, true, ctxt);
4843 }
4844
4845 case INTEGER_CST:
4846 case REAL_CST:
4847 case STRING_CST:
4848 return get_or_create_constant_svalue (pv.m_tree);
4849
4850 case COMPONENT_REF:
4851 case MEM_REF:
4852 case SSA_NAME:
4853 case VAR_DECL:
4854 case PARM_DECL:
4855 case RESULT_DECL:
4856 {
4857 region_id var_rid = get_lvalue (pv, ctxt);
4858 return get_region (var_rid)->get_value (*this, true, ctxt);
4859 }
4860 }
4861 }
4862
4863 /* Get the value of PV within this region_model,
4864 emitting any diagnostics to CTXT. */
4865
4866 svalue_id
4867 region_model::get_rvalue (path_var pv, region_model_context *ctxt)
4868 {
4869 if (pv.m_tree == NULL_TREE)
4870 return svalue_id::null ();
4871 svalue_id result_sid = get_rvalue_1 (pv, ctxt);
4872
4873 assert_compat_types (get_svalue (result_sid)->get_type (),
4874 TREE_TYPE (pv.m_tree));
4875
4876 return result_sid;
4877 }
4878
4879 /* Get the value of EXPR within this region_model (assuming the most
4880 recent stack frame if it's a local). */
4881
4882 svalue_id
4883 region_model::get_rvalue (tree expr, region_model_context *ctxt)
4884 {
4885 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
4886 }
4887
4888 /* Return an svalue_id for a pointer to RID of type PTR_TYPE, reusing
4889 existing pointer values if one is available. */
4890
4891 svalue_id
4892 region_model::get_or_create_ptr_svalue (tree ptr_type, region_id rid)
4893 {
4894 /* Reuse existing region_svalue, if one of the right type is
4895 available. */
4896 /* In theory we could stash a svalue_id in "region", but differing
4897 pointer types muddles things.
4898 For now, just do a linear search through all existing svalues. */
4899 int i;
4900 svalue *svalue;
4901 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
4902 if (region_svalue *ptr_svalue = svalue->dyn_cast_region_svalue ())
4903 if (ptr_svalue->get_pointee () == rid
4904 && ptr_svalue->get_type () == ptr_type)
4905 return svalue_id::from_int (i);
4906
4907 return add_svalue (new region_svalue (ptr_type, rid));
4908 }
4909
4910 /* Return an svalue_id for a constant_svalue for CST_EXPR,
4911 creating the constant_svalue if necessary.
4912 The constant_svalue instances are reused, based on pointer equality
4913 of trees. */
4914
4915 svalue_id
4916 region_model::get_or_create_constant_svalue (tree cst_expr)
4917 {
4918 gcc_assert (cst_expr);
4919
4920 /* Reuse one if it already exists. */
4921 // TODO: maybe store a map, rather than do linear search?
4922 int i;
4923 svalue *svalue;
4924 FOR_EACH_VEC_ELT (m_svalues, i, svalue)
4925 if (svalue->maybe_get_constant () == cst_expr)
4926 return svalue_id::from_int (i);
4927
4928 svalue_id cst_sid = add_svalue (new constant_svalue (cst_expr));
4929 return cst_sid;
4930 }
4931
4932 /* Return an svalue_id for a region_svalue for FNDECL,
4933 creating the function_region if necessary. */
4934
4935 svalue_id
4936 region_model::get_svalue_for_fndecl (tree ptr_type, tree fndecl)
4937 {
4938 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
4939 region_id function_rid = get_region_for_fndecl (fndecl);
4940 return get_or_create_ptr_svalue (ptr_type, function_rid);
4941 }
4942
4943 /* Return a region_id for a function_region for FNDECL,
4944 creating it if necessary. */
4945
4946 region_id
4947 region_model::get_region_for_fndecl (tree fndecl)
4948 {
4949 gcc_assert (TREE_CODE (fndecl) == FUNCTION_DECL);
4950
4951 region_id code_rid = get_root_region ()->ensure_code_region (this);
4952 code_region *code = get_root_region ()->get_code_region (this);
4953
4954 return code->get_or_create (this, code_rid, fndecl, TREE_TYPE (fndecl));
4955 }
4956
4957 /* Return an svalue_id for a region_svalue for LABEL,
4958 creating the label_region if necessary. */
4959
4960 svalue_id
4961 region_model::get_svalue_for_label (tree ptr_type, tree label)
4962 {
4963 gcc_assert (TREE_CODE (label) == LABEL_DECL);
4964 region_id label_rid = get_region_for_label (label);
4965 return get_or_create_ptr_svalue (ptr_type, label_rid);
4966 }
4967
4968 /* Return a region_id for a label_region for LABEL,
4969 creating it if necessary. */
4970
4971 region_id
4972 region_model::get_region_for_label (tree label)
4973 {
4974 gcc_assert (TREE_CODE (label) == LABEL_DECL);
4975
4976 tree fndecl = DECL_CONTEXT (label);
4977 gcc_assert (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL);
4978
4979 region_id func_rid = get_region_for_fndecl (fndecl);
4980 function_region *func_reg = get_region <function_region> (func_rid);
4981 return func_reg->get_or_create (this, func_rid, label, TREE_TYPE (label));
4982 }
4983
4984 /* Build a cast of SRC_EXPR to DST_TYPE, or return NULL_TREE.
4985
4986 Adapted from gcc::jit::playback::context::build_cast, which in turn is
4987 adapted from
4988 - c/c-typeck.c:build_c_cast
4989 - c/c-convert.c: convert
4990 - convert.h
4991 Only some kinds of cast are currently supported here. */
4992
4993 static tree
4994 build_cast (tree dst_type, tree src_expr)
4995 {
4996 tree result = targetm.convert_to_type (dst_type, src_expr);
4997 if (result)
4998 return result;
4999 enum tree_code dst_code = TREE_CODE (dst_type);
5000 switch (dst_code)
5001 {
5002 case INTEGER_TYPE:
5003 case ENUMERAL_TYPE:
5004 result = convert_to_integer (dst_type, src_expr);
5005 goto maybe_fold;
5006
5007 case BOOLEAN_TYPE:
5008 /* Compare with c_objc_common_truthvalue_conversion and
5009 c_common_truthvalue_conversion. */
5010 /* For now, convert to: (src_expr != 0) */
5011 result = build2 (NE_EXPR, dst_type,
5012 src_expr,
5013 build_int_cst (TREE_TYPE (src_expr), 0));
5014 goto maybe_fold;
5015
5016 case REAL_TYPE:
5017 result = convert_to_real (dst_type, src_expr);
5018 goto maybe_fold;
5019
5020 case POINTER_TYPE:
5021 result = build1 (NOP_EXPR, dst_type, src_expr);
5022 goto maybe_fold;
5023
5024 default:
5025 return NULL_TREE;
5026
5027 maybe_fold:
5028 if (TREE_CODE (result) != C_MAYBE_CONST_EXPR)
5029 result = fold (result);
5030 return result;
5031 }
5032 }
5033
5034 /* If the type of SID's underlying value is DST_TYPE, return SID.
5035 Otherwise, attempt to create (or reuse) an svalue representing an access
5036 of SID as a DST_TYPE and return that value's svalue_id. */
5037
5038 svalue_id
5039 region_model::maybe_cast_1 (tree dst_type, svalue_id sid)
5040 {
5041 svalue *sval = get_svalue (sid);
5042 tree src_type = sval->get_type ();
5043 if (src_type == dst_type)
5044 return sid;
5045
5046 if (POINTER_TYPE_P (dst_type)
5047 || POINTER_TYPE_P (src_type))
5048 {
5049 /* Pointer to region. */
5050 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5051 return get_or_create_ptr_svalue (dst_type, ptr_sval->get_pointee ());
5052
5053 /* Unknown pointer? Get or create a new unknown pointer of the
5054 correct type, preserving the equality between the pointers. */
5055 if (sval->dyn_cast_unknown_svalue ())
5056 {
5057 equiv_class &ec = m_constraints->get_equiv_class (sid);
5058
5059 /* Look for an existing pointer of the correct type within the EC. */
5060 int i;
5061 svalue_id *equiv_sid;
5062 FOR_EACH_VEC_ELT (ec.m_vars, i, equiv_sid)
5063 {
5064 svalue *equiv_val = get_svalue (*equiv_sid);
5065 if (equiv_val->get_type () == dst_type)
5066 return *equiv_sid;
5067 }
5068
5069 /* Otherwise, create a new unknown pointer of the correct type. */
5070 svalue *unknown_sval = new unknown_svalue (dst_type);
5071 svalue_id new_ptr_sid = add_svalue (unknown_sval);
5072 m_constraints->add_constraint (sid, EQ_EXPR, new_ptr_sid);
5073 return new_ptr_sid;
5074 }
5075 }
5076
5077 /* Attempt to cast constants. */
5078 if (tree src_cst = sval->maybe_get_constant ())
5079 {
5080 tree dst = build_cast (dst_type, src_cst);
5081 gcc_assert (dst != NULL_TREE);
5082 if (CONSTANT_CLASS_P (dst))
5083 return get_or_create_constant_svalue (dst);
5084 }
5085
5086 /* Otherwise, return a new unknown value. */
5087 svalue *unknown_sval = new unknown_svalue (dst_type);
5088 return add_svalue (unknown_sval);
5089 }
5090
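/* A hypothetical example, for illustration only (identifiers made up):
   a pointer cast of the kind handled by maybe_cast_1 above.  The
   "void *" result of malloc and the "struct foo *" it is cast to are
   distinct svalues of different types, but share the same pointee
   region.

     #include <stdlib.h>

     struct foo { int x; };

     struct foo *example (void)
     {
       void *p = malloc (sizeof (struct foo));
       return (struct foo *) p;  // region_svalue recast; pointee region is shared
     }
*/
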
5091 /* If the type of SID's underlying value is DST_TYPE, return SID.
5092 Otherwise, attempt to create (or reuse) an svalue representing an access
5093 of SID as a DST_TYPE and return that value's svalue_id.
5094
5095 If the result != SID, then call CTXT's on_cast vfunc (if CTXT is non-NULL),
5096 so that sm-state can be propagated from SID to the result. */
5097
5098 svalue_id
5099 region_model::maybe_cast (tree dst_type, svalue_id sid,
5100 region_model_context *ctxt)
5101 {
5102 svalue_id result = maybe_cast_1 (dst_type, sid);
5103 if (result != sid)
5104 if (ctxt)
5105 {
5106 /* Notify ctxt about a cast, so any sm-state can be copied. */
5107 ctxt->on_cast (sid, result);
5108 }
5109 return result;
5110 }
5111
5112 /* Ensure that the region given by STRUCT_OR_UNION_RID has a child region
5113 for FIELD; return the child region's region_id. */
5114
5115 region_id
5116 region_model::get_field_region (region_id struct_or_union_rid, tree field)
5117 {
5118 struct_or_union_region *sou_reg
5119 = get_region<struct_or_union_region> (struct_or_union_rid);
5120
5121 /* Inherit constness from parent type. */
5122 const int qual_mask = TYPE_QUAL_CONST;
5123 int sou_quals = TYPE_QUALS (sou_reg->get_type ()) & qual_mask;
5124 tree field_type = TREE_TYPE (field);
5125 tree field_type_with_quals = build_qualified_type (field_type, sou_quals);
5126
5127 // TODO: maybe convert to a vfunc?
5128 if (sou_reg->get_kind () == RK_UNION)
5129 {
5130 /* Union.
5131 Get a view of the union as a whole, with the type of the field. */
5132 region_id view_rid
5133 = get_or_create_view (struct_or_union_rid, field_type_with_quals);
5134 return view_rid;
5135 }
5136 else
5137 {
5138 /* Struct. */
5139 region_id child_rid
5140 = sou_reg->get_or_create (this, struct_or_union_rid, field,
5141 field_type_with_quals);
5142 return child_rid;
5143 }
5144 }
5145
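/* A hypothetical example, for illustration only (identifiers made up):
   field accesses showing the struct vs. union split in
   get_field_region above.  A struct field gets its own child region,
   whereas a union field is modeled as a view of the whole union with
   the field's type.

     struct s { int a; int b; };
     union u { int i; float f; };

     void example (struct s *sp, union u *up)
     {
       sp->a = 1;     // child region of the struct region
       up->f = 2.0f;  // view of the union region with type "float"
     }
*/
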
5146 /* Get a region_id for referencing PTR_SID, creating a region if need be, and
5147 potentially generating warnings via CTXT. */
5148
5149 region_id
5150 region_model::deref_rvalue (svalue_id ptr_sid, region_model_context *ctxt)
5151 {
5152 gcc_assert (!ptr_sid.null_p ());
5153 svalue *ptr_svalue = get_svalue (ptr_sid);
5154 gcc_assert (ptr_svalue);
5155
5156 switch (ptr_svalue->get_kind ())
5157 {
5158 case SK_REGION:
5159 {
5160 region_svalue *region_sval = as_a <region_svalue *> (ptr_svalue);
5161 return region_sval->get_pointee ();
5162 }
5163
5164 case SK_CONSTANT:
5165 goto create_symbolic_region;
5166
5167 case SK_POISONED:
5168 {
5169 if (ctxt)
5170 if (tree ptr = get_representative_tree (ptr_sid))
5171 {
5172 poisoned_svalue *poisoned_sval
5173 = as_a <poisoned_svalue *> (ptr_svalue);
5174 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
5175 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
5176 }
5177 goto create_symbolic_region;
5178 }
5179
5180 case SK_UNKNOWN:
5181 {
5182 create_symbolic_region:
5183 /* We need a symbolic_region to represent this unknown region.
5184 We don't know if it is on the heap, stack, or a global,
5185 so use the root region as parent. */
5186 region_id new_rid
5187 = add_region (new symbolic_region (m_root_rid, NULL_TREE, false));
5188
5189 /* We need to write the region back into the pointer,
5190 or we'll get a new, different region each time.
5191 We do this by changing the meaning of ptr_sid, replacing
5192 the unknown value with the ptr to the new region.
5193 We replace the meaning of the ID rather than simply writing
5194 to PTR's lvalue since there could be several places sharing
5195 the same unknown ptr value. */
5196 svalue *ptr_val
5197 = new region_svalue (ptr_svalue->get_type (), new_rid);
5198 replace_svalue (ptr_sid, ptr_val);
5199
5200 return new_rid;
5201 }
5202
5203 case SK_SETJMP:
5204 goto create_symbolic_region;
5205 }
5206
5207 gcc_unreachable ();
5208 }
5209
5210 /* Get a region_id for referencing PTR, creating a region if need be, and
5211 potentially generating warnings via CTXT. */
5212
5213 region_id
5214 region_model::deref_rvalue (tree ptr, region_model_context *ctxt)
5215 {
5216 svalue_id ptr_sid = get_rvalue (ptr, ctxt);
5217 return deref_rvalue (ptr_sid, ctxt);
5218 }
5219
5220 /* Set the value of the region given by LHS_RID to the value given
5221 by RHS_SID. */
5222
5223 void
5224 region_model::set_value (region_id lhs_rid, svalue_id rhs_sid,
5225 region_model_context *ctxt)
5226 {
5227 gcc_assert (!lhs_rid.null_p ());
5228 gcc_assert (!rhs_sid.null_p ());
5229 get_region (lhs_rid)->set_value (*this, lhs_rid, rhs_sid, ctxt);
5230 }
5231
5232 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5233 this model. */
5234
5235 tristate
5236 region_model::eval_condition (svalue_id lhs_sid,
5237 enum tree_code op,
5238 svalue_id rhs_sid) const
5239 {
5240 svalue *lhs = get_svalue (lhs_sid);
5241 svalue *rhs = get_svalue (rhs_sid);
5242
5243 /* For now, make no attempt to capture constraints on floating-point
5244 values. */
5245 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
5246 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
5247 return tristate::unknown ();
5248
5249 tristate ts = eval_condition_without_cm (lhs_sid, op, rhs_sid);
5250
5251 if (ts.is_known ())
5252 return ts;
5253
5254 /* Otherwise, try constraints. */
5255 return m_constraints->eval_condition (lhs_sid, op, rhs_sid);
5256 }
5257
5258 /* Determine what is known about the condition "LHS_SID OP RHS_SID" within
5259 this model, without resorting to the constraint_manager.
5260
5261 This is exposed so that impl_region_model_context::on_state_leak can
5262 check for equality part-way through region_model::purge_unused_svalues
5263 without risking creating new ECs. */
5264
5265 tristate
5266 region_model::eval_condition_without_cm (svalue_id lhs_sid,
5267 enum tree_code op,
5268 svalue_id rhs_sid) const
5269 {
5270 svalue *lhs = get_svalue (lhs_sid);
5271 svalue *rhs = get_svalue (rhs_sid);
5272 gcc_assert (lhs);
5273 gcc_assert (rhs);
5274
5275 /* See what we know based on the values. */
5276 if (lhs && rhs)
5277 {
5278 /* For now, make no attempt to capture constraints on floating-point
5279 values. */
5280 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
5281 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
5282 return tristate::unknown ();
5283
5284 if (lhs == rhs)
5285 {
5286 /* If we have the same svalue, then we have equality
5287 (apart from NaN-handling).
5288 TODO: should this definitely be the case for poisoned values? */
5289 switch (op)
5290 {
5291 case EQ_EXPR:
5292 case GE_EXPR:
5293 case LE_EXPR:
5294 return tristate::TS_TRUE;
5295
5296 case NE_EXPR:
5297 case GT_EXPR:
5298 case LT_EXPR:
5299 return tristate::TS_FALSE;
5300
5301 default:
5302 /* For other ops, use the logic below. */
5303 break;
5304 }
5305 }
5306
5307 /* If we have a pair of region_svalues, compare them. */
5308 if (region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
5309 if (region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
5310 {
5311 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
5312 if (res.is_known ())
5313 return res;
5314 /* Otherwise, only known through constraints. */
5315 }
5316
5317 /* If we have a pair of constants, compare them. */
5318 if (constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
5319 if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
5320 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
5321
5322 /* Handle comparison of a region_svalue against zero. */
5323 if (region_svalue *ptr = lhs->dyn_cast_region_svalue ())
5324 if (constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
5325 if (zerop (cst_rhs->get_constant ()))
5326 {
5327 /* A region_svalue is a non-NULL pointer, except in certain
5328 special cases (see the comment for region::non_null_p). */
5329 region *pointee = get_region (ptr->get_pointee ());
5330 if (pointee->non_null_p (*this))
5331 {
5332 switch (op)
5333 {
5334 default:
5335 gcc_unreachable ();
5336
5337 case EQ_EXPR:
5338 case GE_EXPR:
5339 case LE_EXPR:
5340 return tristate::TS_FALSE;
5341
5342 case NE_EXPR:
5343 case GT_EXPR:
5344 case LT_EXPR:
5345 return tristate::TS_TRUE;
5346 }
5347 }
5348 }
5349 }
5350
5351 return tristate::TS_UNKNOWN;
5352 }
5353
5354 /* Attempt to add the constraint "LHS OP RHS" to this region_model.
5355 If it is consistent with existing constraints, add it, and return true.
5356 Return false if it contradicts existing constraints.
5357 Use CTXT for reporting any diagnostics associated with the accesses. */
5358
5359 bool
5360 region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5361 region_model_context *ctxt)
5362 {
5363 /* For now, make no attempt to capture constraints on floating-point
5364 values. */
5365 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5366 return true;
5367
5368 svalue_id lhs_sid = get_rvalue (lhs, ctxt);
5369 svalue_id rhs_sid = get_rvalue (rhs, ctxt);
5370
5371 tristate t_cond = eval_condition (lhs_sid, op, rhs_sid);
5372
5373 /* If we already have the condition, do nothing. */
5374 if (t_cond.is_true ())
5375 return true;
5376
5377 /* Reject a constraint that would contradict existing knowledge, as
5378 unsatisfiable. */
5379 if (t_cond.is_false ())
5380 return false;
5381
5382 /* Store the constraint. */
5383 m_constraints->add_constraint (lhs_sid, op, rhs_sid);
5384
5385 add_any_constraints_from_ssa_def_stmt (lhs, op, rhs, ctxt);
5386
5387 /* Notify the context, if any. This exists so that the state machines
5388 in a program_state can be notified about the condition, and so can
5389 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
5390 when synthesizing constraints as above. */
5391 if (ctxt)
5392 ctxt->on_condition (lhs, op, rhs);
5393
5394 return true;
5395 }
5396
5397 /* Subroutine of region_model::add_constraint for handling optimized
5398 && and || conditionals.
5399
5400 If we have an SSA_NAME for a boolean compared against 0,
5401 look at anything implied by the def stmt and call add_constraint
5402 for it (which could recurse).
5403
5404 For example, if we have
5405 _1 = p_6 == 0B;
5406 _2 = p_8 == 0B;
5407 _3 = _1 | _2
5408 and add the constraint
5409 (_3 == 0),
5410 then the def stmt for _3 implies that _1 and _2 are both false,
5411 and hence we can add the constraints:
5412 p_6 != 0B
5413 p_8 != 0B. */
5414
5415 void
5416 region_model::add_any_constraints_from_ssa_def_stmt (tree lhs,
5417 enum tree_code op,
5418 tree rhs,
5419 region_model_context *ctxt)
5420 {
5421 if (TREE_CODE (lhs) != SSA_NAME)
5422 return;
5423
5424 if (rhs != boolean_false_node)
5425 return;
5426
5427 if (op != NE_EXPR && op != EQ_EXPR)
5428 return;
5429
5430 /* We have either
5431 - "LHS != false" (i.e. LHS is true), or
5432 - "LHS == false" (i.e. LHS is false). */
5433 bool is_true = op == NE_EXPR;
5434
5435 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
5436 gassign *assign = dyn_cast<gassign *> (def_stmt);
5437 if (!assign)
5438 return;
5439
5440 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
5441
5442 switch (rhs_code)
5443 {
5444 default:
5445 break;
5446 case BIT_AND_EXPR:
5447 {
5448 if (is_true)
5449 {
5450 /* ...and "LHS == (rhs1 & rhs2)", i.e. "(rhs1 & rhs2)" is true,
5451 then both rhs1 and rhs2 must be true. */
5452 tree rhs1 = gimple_assign_rhs1 (assign);
5453 tree rhs2 = gimple_assign_rhs2 (assign);
5454 add_constraint (rhs1, NE_EXPR, boolean_false_node, ctxt);
5455 add_constraint (rhs2, NE_EXPR, boolean_false_node, ctxt);
5456 }
5457 }
5458 break;
5459
5460 case BIT_IOR_EXPR:
5461 {
5462 if (!is_true)
5463 {
5464 /* ...and "LHS == (rhs1 | rhs2)",
5465 i.e. "(rhs1 | rhs2)" is false,
5466 then both rhs1 and rhs2 must be false. */
5467 tree rhs1 = gimple_assign_rhs1 (assign);
5468 tree rhs2 = gimple_assign_rhs2 (assign);
5469 add_constraint (rhs1, EQ_EXPR, boolean_false_node, ctxt);
5470 add_constraint (rhs2, EQ_EXPR, boolean_false_node, ctxt);
5471 }
5472 }
5473 break;
5474
5475 case EQ_EXPR:
5476 case NE_EXPR:
5477 {
5478 /* ...and "LHS == (rhs1 OP rhs2)"
5479 then rhs1 OP rhs2 must have the same logical value as LHS. */
5480 tree rhs1 = gimple_assign_rhs1 (assign);
5481 tree rhs2 = gimple_assign_rhs2 (assign);
5482 if (!is_true)
5483 rhs_code
5484 = invert_tree_comparison (rhs_code, false /* honor_nans */);
5485 add_constraint (rhs1, rhs_code, rhs2, ctxt);
5486 }
5487 break;
5488 }
5489 }
5490
5491 /* Determine what is known about the condition "LHS OP RHS" within
5492 this model.
5493 Use CTXT for reporting any diagnostics associated with the accesses. */
5494
5495 tristate
5496 region_model::eval_condition (tree lhs,
5497 enum tree_code op,
5498 tree rhs,
5499 region_model_context *ctxt)
5500 {
5501 /* For now, make no attempt to model constraints on floating-point
5502 values. */
5503 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5504 return tristate::unknown ();
5505
5506 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
5507 }
5508
5509 /* If SID is a constant value, return the underlying tree constant.
5510 Otherwise, return NULL_TREE. */
5511
5512 tree
5513 region_model::maybe_get_constant (svalue_id sid) const
5514 {
5515 gcc_assert (!sid.null_p ());
5516 svalue *sval = get_svalue (sid);
5517 return sval->maybe_get_constant ();
5518 }
5519
5520 /* Create a new child region of the heap (creating the heap region if
5521 necessary).
5522 Return the region_id of the new child region. */
5523
5524 region_id
5525 region_model::add_new_malloc_region ()
5526 {
5527 region_id heap_rid
5528 = get_root_region ()->ensure_heap_region (this);
5529 return add_region (new symbolic_region (heap_rid, NULL_TREE, true));
5530 }
5531
5532 /* Attempt to return a tree that represents SID, or return NULL_TREE.
5533 Find the first region that stores the value (e.g. a local) and
5534 generate a representative tree for it. */
5535
5536 tree
5537 region_model::get_representative_tree (svalue_id sid) const
5538 {
5539 if (sid.null_p ())
5540 return NULL_TREE;
5541
5542 unsigned i;
5543 region *region;
5544 FOR_EACH_VEC_ELT (m_regions, i, region)
5545 if (sid == region->get_value_direct ())
5546 {
5547 path_var pv = get_representative_path_var (region_id::from_int (i));
5548 if (pv.m_tree)
5549 return pv.m_tree;
5550 }
5551
5552 return maybe_get_constant (sid);
5553 }
5554
5555 /* Attempt to return a path_var that represents the region, or return
5556 the NULL path_var.
5557 For example, a region for a field of a local would be a path_var
5558 wrapping a COMPONENT_REF. */
5559
5560 path_var
5561 region_model::get_representative_path_var (region_id rid) const
5562 {
5563 region *reg = get_region (rid);
5564 region *parent_region = get_region (reg->get_parent ());
5565 region_id stack_rid = get_stack_region_id ();
5566 if (!stack_rid.null_p ())
5567 if (parent_region->get_parent () == stack_rid)
5568 {
5569 frame_region *parent_frame = (frame_region *)parent_region;
5570 tree t = parent_frame->get_tree_for_child_region (rid);
5571 return path_var (t, parent_frame->get_depth ());
5572 }
5573 if (reg->get_parent () == get_globals_region_id ())
5574 {
5575 map_region *globals = get_root_region ()->get_globals_region (this);
5576 if (globals)
5577 return path_var (globals->get_tree_for_child_region (rid), -1);
5578 }
5579
5580 /* Handle e.g. fields of a local by recursing. */
5581 region_id parent_rid = reg->get_parent ();
5582 region *parent_reg = get_region (parent_rid);
5583 if (parent_reg)
5584 {
5585 if (parent_reg->get_kind () == RK_STRUCT)
5586 {
5587 map_region *parent_map_region = (map_region *)parent_reg;
5588 /* This can fail if we have a view, rather than a field. */
5589 if (tree child_key
5590 = parent_map_region->get_tree_for_child_region (rid))
5591 {
5592 path_var parent_pv = get_representative_path_var (parent_rid);
5593 if (parent_pv.m_tree && TREE_CODE (child_key) == FIELD_DECL)
5594 return path_var (build3 (COMPONENT_REF,
5595 TREE_TYPE (child_key),
5596 parent_pv.m_tree, child_key,
5597 NULL_TREE),
5598 parent_pv.m_stack_depth);
5599 }
5600 }
5601 }
5602
5603 return path_var (NULL_TREE, 0);
5604 }
5605
5606 /* Locate all regions that directly have value SID and append representative
5607 path_var instances for them into *OUT. */
5608
5609 void
5610 region_model::get_path_vars_for_svalue (svalue_id sid, vec<path_var> *out) const
5611 {
5612 unsigned i;
5613 region *region;
5614 FOR_EACH_VEC_ELT (m_regions, i, region)
5615 if (sid == region->get_value_direct ())
5616 {
5617 path_var pv = get_representative_path_var (region_id::from_int (i));
5618 if (pv.m_tree)
5619 out->safe_push (pv);
5620 }
5621 }
5622
5623 /* Set the value of DST_RID to a new unknown value of type TYPE. */
5624
5625 svalue_id
5626 region_model::set_to_new_unknown_value (region_id dst_rid, tree type,
5627 region_model_context *ctxt)
5628 {
5629 gcc_assert (!dst_rid.null_p ());
5630 svalue_id new_sid = add_svalue (new unknown_svalue (type));
5631 set_value (dst_rid, new_sid, ctxt);
5632
5633 // TODO: presumably purge all child regions too (but do this in set_value?)
5634
5635 return new_sid;
5636 }
5637
5638 /* Update this model for any phis in SNODE, assuming we came from
5639 LAST_CFG_SUPEREDGE. */
5640
5641 void
5642 region_model::update_for_phis (const supernode *snode,
5643 const cfg_superedge *last_cfg_superedge,
5644 region_model_context *ctxt)
5645 {
5646 gcc_assert (last_cfg_superedge);
5647
5648 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
5649 !gsi_end_p (gpi); gsi_next (&gpi))
5650 {
5651 gphi *phi = gpi.phi ();
5652
5653 tree src = last_cfg_superedge->get_phi_arg (phi);
5654 tree lhs = gimple_phi_result (phi);
5655
5656 /* Update next_state based on phi. */
5657 bool is_back_edge = last_cfg_superedge->back_edge_p ();
5658 handle_phi (phi, lhs, src, is_back_edge, ctxt);
5659 }
5660 }
5661
5662 /* Attempt to update this model for taking EDGE (where the last statement
5663 was LAST_STMT), returning true if the edge can be taken, false
5664 otherwise.
5665
5666 For CFG superedges where LAST_STMT is a conditional or a switch
5667 statement, attempt to add the relevant conditions for EDGE to this
5668 model, returning true if they are feasible, or false if they are
5669 impossible.
5670
5671 For call superedges, push frame information and store arguments
5672 into parameters.
5673
5674 For return superedges, pop frame information and store return
5675 values into any lhs.
5676
5677 Rejection of call/return superedges happens elsewhere, in
5678 program_point::on_edge (i.e. based on program point, rather
5679 than program state). */
5680
5681 bool
5682 region_model::maybe_update_for_edge (const superedge &edge,
5683 const gimple *last_stmt,
5684 region_model_context *ctxt)
5685 {
5686 /* Handle frame updates for interprocedural edges. */
5687 switch (edge.m_kind)
5688 {
5689 default:
5690 break;
5691
5692 case SUPEREDGE_CALL:
5693 {
5694 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
5695 update_for_call_superedge (*call_edge, ctxt);
5696 }
5697 break;
5698
5699 case SUPEREDGE_RETURN:
5700 {
5701 const return_superedge *return_edge
5702 = as_a <const return_superedge *> (&edge);
5703 update_for_return_superedge (*return_edge, ctxt);
5704 }
5705 break;
5706
5707 case SUPEREDGE_INTRAPROCEDURAL_CALL:
5708 {
5709 const callgraph_superedge *cg_sedge
5710 = as_a <const callgraph_superedge *> (&edge);
5711 update_for_call_summary (*cg_sedge, ctxt);
5712 }
5713 break;
5714 }
5715
5716 if (last_stmt == NULL)
5717 return true;
5718
5719 /* Apply any constraints for conditionals/switch statements. */
5720
5721 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
5722 {
5723 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
5724 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt);
5725 }
5726
5727 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
5728 {
5729 const switch_cfg_superedge *switch_sedge
5730 = as_a <const switch_cfg_superedge *> (&edge);
5731 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, ctxt);
5732 }
5733
5734 return true;
5735 }
5736
5737 /* Push a new frame_region on to the stack region.
5738 Populate the frame_region with child regions for the function call's
5739 parameters, using values from the arguments at the callsite in the
5740 caller's frame. */
5741
5742 void
5743 region_model::update_for_call_superedge (const call_superedge &call_edge,
5744 region_model_context *ctxt)
5745 {
5746 /* Build a vec of argument svalue_id, using the current top
5747 frame for resolving tree expressions. */
5748 const gcall *call_stmt = call_edge.get_call_stmt ();
5749 auto_vec<svalue_id> arg_sids (gimple_call_num_args (call_stmt));
5750
5751 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5752 {
5753 tree arg = gimple_call_arg (call_stmt, i);
5754 arg_sids.quick_push (get_rvalue (arg, ctxt));
5755 }
5756
5757 push_frame (call_edge.get_callee_function (), &arg_sids, ctxt);
5758 }
5759
5760 /* Pop the top-most frame_region from the stack, and store the svalue
5761 for any returned value into the region for the lvalue of the LHS of
5762 the call (if any). */
5763
5764 void
5765 region_model::update_for_return_superedge (const return_superedge &return_edge,
5766 region_model_context *ctxt)
5767 {
5768 purge_stats stats;
5769 svalue_id result_sid = pop_frame (true, &stats, ctxt);
5770 // TODO: do something with the stats?
5771
5772 if (result_sid.null_p ())
5773 return;
5774
5775 /* Set the result of the call, within the caller frame. */
5776 const gcall *call_stmt = return_edge.get_call_stmt ();
5777 tree lhs = gimple_call_lhs (call_stmt);
5778 if (lhs)
5779 set_value (get_lvalue (lhs, ctxt), result_sid, ctxt);
5780 else
5781 {
5782 /* This could be a leak; try purging again, but this time,
5783 don't special-case the result_sid. */
5784 purge_stats stats;
5785 purge_unused_svalues (&stats, ctxt);
5786 }
5787 }
5788
5789 /* Update this region_model with a summary of the effect of calling
5790 and returning from CG_SEDGE.
5791
5792 TODO: Currently this is extremely simplistic: we merely set the
5793 return value to "unknown". A proper implementation would e.g. update
5794 sm-state, and presumably be reworked to support multiple outcomes. */
5795
5796 void
5797 region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
5798 region_model_context *ctxt)
5799 {
5800 /* For now, set any return value to "unknown". */
5801 const gcall *call_stmt = cg_sedge.get_call_stmt ();
5802 tree lhs = gimple_call_lhs (call_stmt);
5803 if (lhs)
5804 set_to_new_unknown_value (get_lvalue (lhs, ctxt), TREE_TYPE (lhs), ctxt);
5805
5806 // TODO: actually implement some kind of summary here
5807 }
5808
5809 /* Given a true or false edge guarded by conditional statement COND_STMT,
5810 determine appropriate constraints for the edge to be taken.
5811
5812 If they are feasible, add the constraints and return true.
5813
5814 Return false if the constraints contradict existing knowledge
5815 (and so the edge should not be taken). */
5816
5817 bool
5818 region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
5819 const gcond *cond_stmt,
5820 region_model_context *ctxt)
5821 {
5822 ::edge cfg_edge = sedge.get_cfg_edge ();
5823 gcc_assert (cfg_edge != NULL);
5824 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5825
5826 enum tree_code op = gimple_cond_code (cond_stmt);
5827 tree lhs = gimple_cond_lhs (cond_stmt);
5828 tree rhs = gimple_cond_rhs (cond_stmt);
5829 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5830 op = invert_tree_comparison (op, false /* honor_nans */);
5831 return add_constraint (lhs, op, rhs, ctxt);
5832 }
5833
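/* As an illustrative example of apply_constraints_for_gcond above:
   given "if (x > 10)", the outedge flagged EDGE_TRUE_VALUE leads to
   add_constraint (x, GT_EXPR, 10), whereas the outedge flagged
   EDGE_FALSE_VALUE first inverts the comparison and leads to
   add_constraint (x, LE_EXPR, 10). */
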
5834 /* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5835 for the edge to be taken.
5836
5837 If they are feasible, add the constraints and return true.
5838
5839 Return false if the constraints contradict existing knowledge
5840 (and so the edge should not be taken). */
5841
5842 bool
5843 region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
5844 const gswitch *switch_stmt,
5845 region_model_context *ctxt)
5846 {
5847 tree index = gimple_switch_index (switch_stmt);
5848 tree case_label = edge.get_case_label ();
5849 gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR);
5850 tree lower_bound = CASE_LOW (case_label);
5851 tree upper_bound = CASE_HIGH (case_label);
5852 if (lower_bound)
5853 {
5854 if (upper_bound)
5855 {
5856 /* Range. */
5857 if (!add_constraint (index, GE_EXPR, lower_bound, ctxt))
5858 return false;
5859 return add_constraint (index, LE_EXPR, upper_bound, ctxt);
5860 }
5861 else
5862 /* Single-value. */
5863 return add_constraint (index, EQ_EXPR, lower_bound, ctxt);
5864 }
5865 else
5866 {
5867 /* The default case.
5868 Add exclusions based on the other cases. */
5869 for (unsigned other_idx = 1;
5870 other_idx < gimple_switch_num_labels (switch_stmt);
5871 other_idx++)
5872 {
5873 tree other_label = gimple_switch_label (switch_stmt,
5874 other_idx);
5875 tree other_lower_bound = CASE_LOW (other_label);
5876 tree other_upper_bound = CASE_HIGH (other_label);
5877 gcc_assert (other_lower_bound);
5878 if (other_upper_bound)
5879 {
5880 /* Exclude this range-valued case.
5881 For now, we just exclude the boundary values.
5882 TODO: exclude the values within the range. */
5883 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
5884 return false;
5885 if (!add_constraint (index, NE_EXPR, other_upper_bound, ctxt))
5886 return false;
5887 }
5888 else
5889 /* Exclude this single-valued case. */
5890 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
5891 return false;
5892 }
5893 return true;
5894 }
5895 }
5896
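/* Illustrative example of apply_constraints_for_gswitch above: given
     switch (i) { case 1: ...; case 5 ... 10: ...; default: ...; }
   the edge for "case 1" adds the constraint i == 1; the edge for the
   range "case 5 ... 10" adds i >= 5 and i <= 10; the default edge adds
   the exclusions i != 1, i != 5 and i != 10 (just the boundary values
   of the range, per the TODO above). */
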
5897 /* Get the root_region within this model (guaranteed to be non-null). */
5898
5899 root_region *
5900 region_model::get_root_region () const
5901 {
5902 return get_region<root_region> (m_root_rid);
5903 }
5904
5905 /* Get the region_id of this model's stack region (if any). */
5906
5907 region_id
5908 region_model::get_stack_region_id () const
5909 {
5910 return get_root_region ()->get_stack_region_id ();
5911 }
5912
5913 /* Create a new frame_region for a call to FUN and push it onto
5914 the stack.
5915
5916 If ARG_SIDS is non-NULL, use it to populate the parameters
5917 in the new frame.
5918 Otherwise, populate them with unknown values.
5919
5920 Return the region_id of the new frame_region. */
5921
5922 region_id
5923 region_model::push_frame (function *fun, vec<svalue_id> *arg_sids,
5924 region_model_context *ctxt)
5925 {
5926 return get_root_region ()->push_frame (this, fun, arg_sids, ctxt);
5927 }
5928
5929 /* Get the region_id of the top-most frame in this region_model's stack,
5930 if any. */
5931
5932 region_id
5933 region_model::get_current_frame_id () const
5934 {
5935 return get_root_region ()->get_current_frame_id (*this);
5936 }
5937
5938 /* Get the function of the top-most frame in this region_model's stack.
5939 There must be such a frame. */
5940
5941 function *
5942 region_model::get_current_function () const
5943 {
5944 region_id frame_id = get_current_frame_id ();
5945 frame_region *frame = get_region<frame_region> (frame_id);
5946 return frame->get_function ();
5947 }
5948
5949 /* Pop the topmost frame_region from this region_model's stack;
5950 see the comment for stack_region::pop_frame. */
5951
5952 svalue_id
5953 region_model::pop_frame (bool purge, purge_stats *out,
5954 region_model_context *ctxt)
5955 {
5956 return get_root_region ()->pop_frame (this, purge, out, ctxt);
5957 }
5958
5959 /* Get the number of frames in this region_model's stack. */
5960
5961 int
5962 region_model::get_stack_depth () const
5963 {
5964 stack_region *stack = get_root_region ()->get_stack_region (this);
5965 if (stack)
5966 return stack->get_num_frames ();
5967 else
5968 return 0;
5969 }
5970
5971 /* Get the function * at DEPTH within the call stack. */
5972
5973 function *
5974 region_model::get_function_at_depth (unsigned depth) const
5975 {
5976 stack_region *stack = get_root_region ()->get_stack_region (this);
5977 gcc_assert (stack);
5978 region_id frame_rid = stack->get_frame_rid (depth);
5979 frame_region *frame = get_region <frame_region> (frame_rid);
5980 return frame->get_function ();
5981 }
5982
5983 /* Get the region_id of this model's globals region (if any). */
5984
5985 region_id
5986 region_model::get_globals_region_id () const
5987 {
5988 return get_root_region ()->get_globals_region_id ();
5989 }
5990
5991 /* Add SVAL to this model, taking ownership, and returning its new
5992 svalue_id. */
5993
5994 svalue_id
5995 region_model::add_svalue (svalue *sval)
5996 {
5997 gcc_assert (sval);
5998 m_svalues.safe_push (sval);
5999 return svalue_id::from_int (m_svalues.length () - 1);
6000 }
6001
6002 /* Change the meaning of SID to be NEW_SVAL
6003 (e.g. when dereferencing an unknown pointer, the pointer
6004 becomes a pointer to a symbolic region, so that all users
6005 of the former unknown pointer are now effectively pointing
6006 at the same region). */
6007
6008 void
6009 region_model::replace_svalue (svalue_id sid, svalue *new_sval)
6010 {
6011 gcc_assert (!sid.null_p ());
6012 int idx = sid.as_int ();
6013
6014 gcc_assert (m_svalues[idx]);
6015 gcc_assert (m_svalues[idx]->get_type () == new_sval->get_type ());
6016 delete m_svalues[idx];
6017
6018 m_svalues[idx] = new_sval;
6019 }
6020
6021 /* Add region R to this model, taking ownership, and returning its new
6022 region_id. */
6023
6024 region_id
6025 region_model::add_region (region *r)
6026 {
6027 gcc_assert (r);
6028 m_regions.safe_push (r);
6029 return region_id::from_int (m_regions.length () - 1);
6030 }
6031
6032 /* Return the svalue with id SVAL_ID, or NULL for a null id. */
6033
6034 svalue *
6035 region_model::get_svalue (svalue_id sval_id) const
6036 {
6037 if (sval_id.null_p ())
6038 return NULL;
6039 return m_svalues[sval_id.as_int ()];
6040 }
6041
6042 /* Return the region with id RID, or NULL for a null id. */
6043
6044 region *
6045 region_model::get_region (region_id rid) const
6046 {
6047 if (rid.null_p ())
6048 return NULL;
6049 return m_regions[rid.as_int ()];
6050 }
6051
6052 /* Make a region of an appropriate subclass for TYPE,
6053 with parent PARENT_RID. */
6054
6055 static region *
6056 make_region_for_type (region_id parent_rid, tree type)
6057 {
6058 gcc_assert (TYPE_P (type));
6059
6060 if (INTEGRAL_TYPE_P (type)
6061 || SCALAR_FLOAT_TYPE_P (type)
6062 || POINTER_TYPE_P (type)
6063 || TREE_CODE (type) == COMPLEX_TYPE
6064 || TREE_CODE (type) == VECTOR_TYPE)
6065 return new primitive_region (parent_rid, type);
6066
6067 if (TREE_CODE (type) == RECORD_TYPE)
6068 return new struct_region (parent_rid, type);
6069
6070 if (TREE_CODE (type) == ARRAY_TYPE)
6071 return new array_region (parent_rid, type);
6072
6073 if (TREE_CODE (type) == UNION_TYPE)
6074 return new union_region (parent_rid, type);
6075
6076 if (FUNC_OR_METHOD_TYPE_P (type))
6077 return new function_region (parent_rid, type);
6078
6079 /* If we have a void *, make a new symbolic region. */
6080 if (VOID_TYPE_P (type))
6081 return new symbolic_region (parent_rid, type, false);
6082
6083 gcc_unreachable ();
6084 }
6085
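/* For example (illustrative), make_region_for_type above maps:
   - "int", "double" and "int *" to primitive_region
   - "struct foo" to struct_region
   - "int[10]" to array_region
   - "union bar" to union_region
   - function and method types to function_region
   - "void" (e.g. the pointee type when dereferencing a "void *")
     to symbolic_region. */
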
6086 /* Add a region with type TYPE and parent PARENT_RID. */
6087
6088 region_id
6089 region_model::add_region_for_type (region_id parent_rid, tree type)
6090 {
6091 gcc_assert (TYPE_P (type));
6092
6093 region *new_region = make_region_for_type (parent_rid, type);
6094 return add_region (new_region);
6095 }
6096
6097 /* Helper class for region_model::purge_unused_svalues. */
6098
6099 class restrict_to_used_svalues : public purge_criteria
6100 {
6101 public:
6102 restrict_to_used_svalues (const auto_sbitmap &used) : m_used (used) {}
6103
6104 bool should_purge_p (svalue_id sid) const FINAL OVERRIDE
6105 {
6106 gcc_assert (!sid.null_p ());
6107 return !bitmap_bit_p (m_used, sid.as_int ());
6108 }
6109
6110 private:
6111 const auto_sbitmap &m_used;
6112 };
6113
6114 /* Remove unused svalues from this model, accumulating stats into STATS.
6115 Unused svalues are deleted. Doing so could reorder the svalues, and
6116 thus change the meaning of svalue_ids.
6117
6118 If CTXT is non-NULL, then it is notified about svalue_id remappings,
6119 and about svalue_ids that are about to be deleted. This allows e.g.
6120 for warning about resource leaks, for the case where the svalue
6121 represents a resource handle in the user code (e.g. a FILE * or a malloc
6122 buffer).
6123
6124 Amongst other things, removing unused svalues is important for ensuring
6125 that the analysis of loops terminates. Otherwise, we could generate a
6126 succession of models with unreferenced "unknown" values, where the
6127 number of redundant unknown values could grow without bounds, and each
6128 such model would be treated as distinct.
6129
6130 If KNOWN_USED_SID is non-NULL, treat *KNOWN_USED_SID as used (this is for
6131 handling values being returned from functions as their frame is popped,
6132 since otherwise we'd have to simultaneously determine both the rvalue
6133 of the return expr in the callee frame and the lvalue for the gcall's
6134 assignment in the caller frame, and it seems cleaner to express all
6135 lvalue and rvalue lookups implicitly relative to a "current" frame). */
6136
6137 void
6138 region_model::purge_unused_svalues (purge_stats *stats,
6139 region_model_context *ctxt,
6140 svalue_id *known_used_sid)
6141 {
6142 // TODO: might want to avoid a vfunc call just to do logging here:
6143 logger *logger = ctxt ? ctxt->get_logger () : NULL;
6144
6145 LOG_SCOPE (logger);
6146
6147 auto_sbitmap used (m_svalues.length ());
6148 bitmap_clear (used);
6149
6150 if (known_used_sid)
6151 if (!known_used_sid->null_p ())
6152 bitmap_set_bit (used, known_used_sid->as_int ());
6153
6154 /* Walk the regions, marking sids that are used. */
6155 unsigned i;
6156 region *r;
6157 FOR_EACH_VEC_ELT (m_regions, i, r)
6158 {
6159 svalue_id sid = r->get_value_direct ();
6160 if (!sid.null_p ())
6161 bitmap_set_bit (used, sid.as_int ());
6162 }
6163
6164 /* Now purge any constraints involving svalues we don't care about. */
6165 restrict_to_used_svalues criterion (used);
6166 m_constraints->purge (criterion, stats);
6167
6168 /* Mark any sids that are in constraints that survived. */
6169 {
6170 equiv_class *ec;
6171 FOR_EACH_VEC_ELT (m_constraints->m_equiv_classes, i, ec)
6172 {
6173 int j;
6174 svalue_id *sid;
6175 FOR_EACH_VEC_ELT (ec->m_vars, j, sid)
6176 {
6177 gcc_assert (!sid->null_p ());
6178 bitmap_set_bit (used, sid->as_int ());
6179 }
6180 }
6181 }
6182
6183 /* Build a mapping from old-sid to new-sid so that we can preserve
6184 order of the used IDs and move all redundant ones to the end.
6185 Iterate through svalue IDs, adding used ones to the front of
6186 the new list, and unused ones to the back. */
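  /* Illustrative sketch (not from a real run): with five svalues where
     sv1 and sv3 are unused, the loop below builds the map
       {sv0 -> sv0, sv2 -> sv1, sv4 -> sv2, sv3 -> sv3, sv1 -> sv4},
     packing the three used IDs at the front and the two unused IDs at
     the back, with both insertion points meeting at 3. */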
6187 svalue_id_map map (m_svalues.length ());
6188 int next_used_new_sid = 0;
6189 int after_next_unused_new_sid = m_svalues.length ();
6190 for (unsigned i = 0; i < m_svalues.length (); i++)
6191 {
6192 svalue_id src (svalue_id::from_int (i));
6193 if (bitmap_bit_p (used, i))
6194 {
6195 if (logger)
6196 logger->log ("sv%i is used", i);
6197 map.put (src, svalue_id::from_int (next_used_new_sid++));
6198 }
6199 else
6200 {
6201 if (logger)
6202 logger->log ("sv%i is unused", i);
6203 map.put (src, svalue_id::from_int (--after_next_unused_new_sid));
6204 }
6205 }
6206 /* The two insertion points should have met. */
6207 gcc_assert (next_used_new_sid == after_next_unused_new_sid);
6208
6209 /* Now walk the regions and the constraints, remapping sids,
6210 so that all the redundant svalues are at the end. */
6211 remap_svalue_ids (map);
6212
6213 if (logger)
6214 {
6215 logger->start_log_line ();
6216 logger->log_partial ("map: ");
6217 map.dump_to_pp (logger->get_printer ());
6218 logger->end_log_line ();
6219 }
6220
6221 /* Notify any client about the remapping and pending deletion.
6222 Potentially this could trigger leak warnings. */
6223 if (ctxt)
6224 {
6225 ctxt->remap_svalue_ids (map);
6226 int num_client_items_purged
6227 = ctxt->on_svalue_purge (svalue_id::from_int (next_used_new_sid), map);
6228 if (stats)
6229 stats->m_num_client_items += num_client_items_purged;
6230 }
6231
6232 /* Drop the redundant svalues from the end of the vector. */
6233 while ((signed)m_svalues.length () > next_used_new_sid)
6234 {
6235 if (logger)
6236 {
6237 svalue_id victim = svalue_id::from_int (m_svalues.length () - 1);
6238 logger->log ("deleting sv%i (was sv%i)",
6239 victim.as_int (),
6240 map.get_src_for_dst (victim).as_int ());
6241 }
6242 delete m_svalues.pop ();
6243 if (stats)
6244 stats->m_num_svalues++;
6245 }
6246
6247 if (known_used_sid)
6248 map.update (known_used_sid);
6249
6250 validate ();
6251 }
6252
6253 /* Renumber the svalues within this model according to MAP. */
6254
6255 void
6256 region_model::remap_svalue_ids (const svalue_id_map &map)
6257 {
6258 /* Update IDs within regions. */
6259 unsigned i;
6260 region *r;
6261 FOR_EACH_VEC_ELT (m_regions, i, r)
6262 r->remap_svalue_ids (map);
6263
6264 /* Update IDs within ECs within constraints. */
6265 m_constraints->remap_svalue_ids (map);
6266
6267 /* Build a reordered svalues vector. */
6268 auto_vec<svalue *> new_svalues (m_svalues.length ());
6269 for (unsigned i = 0; i < m_svalues.length (); i++)
6270 {
6271 svalue_id dst (svalue_id::from_int (i));
6272 svalue_id src = map.get_src_for_dst (dst);
6273 new_svalues.quick_push (get_svalue (src));
6274 }
6275
6276 /* Copy over the reordered vec to m_svalues. */
6277 m_svalues.truncate (0);
6278 gcc_assert (m_svalues.space (new_svalues.length ()));
6279 svalue *sval;
6280 FOR_EACH_VEC_ELT (new_svalues, i, sval)
6281 m_svalues.quick_push (sval);
6282 }
6283
6284 /* Renumber the regions within this model according to MAP. */
6285
6286 void
6287 region_model::remap_region_ids (const region_id_map &map)
6288 {
6289 /* Update IDs within regions. */
6290 unsigned i;
6291 region *r;
6292 FOR_EACH_VEC_ELT (m_regions, i, r)
6293 r->remap_region_ids (map);
6294
6295 /* Update IDs within svalues. */
6296 svalue *sval;
6297 FOR_EACH_VEC_ELT (m_svalues, i, sval)
6298 sval->remap_region_ids (map);
6299
6300 /* Build a reordered regions vector. */
6301 auto_vec<region *> new_regions (m_regions.length ());
6302 for (unsigned i = 0; i < m_regions.length (); i++)
6303 {
6304 region_id dst (region_id::from_int (i));
6305 region_id src = map.get_src_for_dst (dst);
6306 new_regions.quick_push (get_region (src));
6307 }
6308
6309 /* Copy over the reordered vec to m_regions. */
6310 m_regions.truncate (0);
6311 gcc_assert (m_regions.space (new_regions.length ()));
6312 FOR_EACH_VEC_ELT (new_regions, i, r)
6313 m_regions.quick_push (r);
6314 }
6315
6316 /* Delete all regions within SET_TO_PURGE, remapping region IDs for
6317 other regions. It's required that there are no uses of the
6318 regions within the set (or the region IDs will become invalid).
6319
6320 Accumulate stats to STATS. */
6321
6322 void
6323 region_model::purge_regions (const region_id_set &set_to_purge,
6324 purge_stats *stats,
6325 logger *)
6326 {
6327 /* Build a mapping from old-rid to new-rid so that we can preserve
6328 order of the used IDs and move all redundant ones to the end.
6329 Iterate through region IDs, adding used ones to the front of
6330 the new list, and unused ones to the back. */
6331 region_id_map map (m_regions.length ());
6332 int next_used_new_rid = 0;
6333 int after_next_unused_new_rid = m_regions.length ();
6334 for (unsigned i = 0; i < m_regions.length (); i++)
6335 {
6336 region_id src (region_id::from_int (i));
6337 if (set_to_purge.region_p (src))
6338 map.put (src, region_id::from_int (--after_next_unused_new_rid));
6339 else
6340 map.put (src, region_id::from_int (next_used_new_rid++));
6341 }
6342 /* The two insertion points should have met. */
6343 gcc_assert (next_used_new_rid == after_next_unused_new_rid);
6344
6345 /* Now walk the regions and svalues, remapping rids,
6346 so that all the redundant regions are at the end. */
6347 remap_region_ids (map);
6348
6349 /* Drop the redundant regions from the end of the vector. */
6350 while ((signed)m_regions.length () > next_used_new_rid)
6351 {
6352 delete m_regions.pop ();
6353 if (stats)
6354 stats->m_num_regions++;
6355 }
6356 }
6357
6358 /* Populate *OUT with RID and all of its descendents.
6359 If EXCLUDE_RID is non-NULL, then don't add it or its descendents. */
6360
6361 void
6362 region_model::get_descendents (region_id rid, region_id_set *out,
6363 region_id exclude_rid) const
6364 {
6365 out->add_region (rid);
6366
6367 bool changed = true;
6368 while (changed)
6369 {
6370 changed = false;
6371 unsigned i;
6372 region *r;
6373 FOR_EACH_VEC_ELT (m_regions, i, r)
6374 {
6375 region_id iter_rid = region_id::from_int (i);
6376 if (iter_rid == exclude_rid)
6377 continue;
6378 if (!out->region_p (iter_rid))
6379 {
6380 region_id parent_rid = r->get_parent ();
6381 if (!parent_rid.null_p ())
6382 if (out->region_p (parent_rid))
6383 {
6384 out->add_region (iter_rid);
6385 changed = true;
6386 }
6387 }
6388 }
6389 }
6390 }
6391
6392 /* Delete RID and all descendent regions.
6393 Find any pointers to such regions; convert them to
6394 poisoned values of kind PKIND.
6395 Accumulate stats on purged entities into STATS. */
6396
6397 void
6398 region_model::delete_region_and_descendents (region_id rid,
6399 enum poison_kind pkind,
6400 purge_stats *stats,
6401 logger *logger)
6402 {
6403 /* Find all child and descendent regions. */
6404 region_id_set descendents (this);
6405 get_descendents (rid, &descendents, region_id::null ());
6406
6407 /* Find any pointers to such regions; convert to poisoned. */
6408 poison_any_pointers_to_bad_regions (descendents, pkind);
6409
6410 /* Delete all such regions. */
6411 purge_regions (descendents, stats, logger);
6412 }
6413
6414 /* Find any pointers to regions within BAD_REGIONS; convert them to
6415 poisoned values of kind PKIND. */
6416
6417 void
6418 region_model::poison_any_pointers_to_bad_regions (const region_id_set &
6419 bad_regions,
6420 enum poison_kind pkind)
6421 {
6422 int i;
6423 svalue *sval;
6424 FOR_EACH_VEC_ELT (m_svalues, i, sval)
6425 if (region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
6426 {
6427 region_id ptr_dst = ptr_sval->get_pointee ();
6428 if (!ptr_dst.null_p ())
6429 if (bad_regions.region_p (ptr_dst))
6430 replace_svalue
6431 (svalue_id::from_int (i),
6432 new poisoned_svalue (pkind, sval->get_type ()));
6433 }
6434 }
6435
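/* For instance (illustrative), when a frame is popped and its locals are
   deleted via delete_region_and_descendents, a global "p" that pointed at
   a local within that frame has its value replaced here with a
   poisoned_svalue of kind POISON_KIND_POPPED_STACK (this outcome is
   exercised by test_stack_frames below). */
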
6436 /* Attempt to merge THIS with OTHER_MODEL, writing the result
6437 to OUT_MODEL, and populating SID_MAPPING. */
6438
6439 bool
6440 region_model::can_merge_with_p (const region_model &other_model,
6441 region_model *out_model,
6442 svalue_id_merger_mapping *sid_mapping) const
6443 {
6444 gcc_assert (m_root_rid == other_model.m_root_rid);
6445 gcc_assert (m_root_rid.as_int () == 0);
6446 gcc_assert (sid_mapping);
6447 gcc_assert (out_model);
6448
6449 model_merger merger (this, &other_model, out_model, sid_mapping);
6450
6451 if (!root_region::can_merge_p (get_root_region (),
6452 other_model.get_root_region (),
6453 out_model->get_root_region (),
6454 &merger))
6455 return false;
6456
6457 /* Merge constraints. */
6458 constraint_manager::merge (*m_constraints,
6459 *other_model.m_constraints,
6460 out_model->m_constraints,
6461 merger);
6462
6463 out_model->validate ();
6464
6465 /* The merged model should be as simple as (or simpler than) the inputs. */
6466 #if 0
6467 gcc_assert (out_model->m_svalues.length () <= m_svalues.length ());
6468 gcc_assert (out_model->m_svalues.length ()
6469 <= other_model.m_svalues.length ());
6470 #endif
6471 gcc_assert (out_model->m_regions.length () <= m_regions.length ());
6472 gcc_assert (out_model->m_regions.length ()
6473 <= other_model.m_regions.length ());
6474 // TODO: same, for constraints
6475
6476 return true;
6477 }
6478
6479 /* As above, but supply a placeholder svalue_id_merger_mapping
6480 instance to be used and receive output. For use in selftests. */
6481
6482 bool
6483 region_model::can_merge_with_p (const region_model &other_model,
6484 region_model *out_model) const
6485 {
6486 svalue_id_merger_mapping sid_mapping (*this, other_model);
6487 return can_merge_with_p (other_model, out_model, &sid_mapping);
6488 }
6489
6490 /* For debugging purposes: look for a region within this region_model
6491 for a decl named NAME (or an SSA_NAME for such a decl),
6492 returning its value, or svalue_id::null if none are found. */
6493
6494 svalue_id
6495 region_model::get_value_by_name (const char *name) const
6496 {
6497 gcc_assert (name);
6498 tree identifier = get_identifier (name);
6499 return get_root_region ()->get_value_by_name (identifier, *this);
6500 }
6501
6502 /* Generate or reuse an svalue_id within this model for an index
6503 into an array of type PTR_TYPE, based on OFFSET_SID. */
6504
6505 svalue_id
6506 region_model::convert_byte_offset_to_array_index (tree ptr_type,
6507 svalue_id offset_sid)
6508 {
6509 gcc_assert (POINTER_TYPE_P (ptr_type));
6510
6511 if (tree offset_cst = maybe_get_constant (offset_sid))
6512 {
6513 tree elem_type = TREE_TYPE (ptr_type);
6514
6515 /* Arithmetic on void-pointers is a GNU C extension, treating the size
6516 of a void as 1.
6517 https://gcc.gnu.org/onlinedocs/gcc/Pointer-Arith.html
6518
6519 Returning early for this case avoids a diagnostic from within the
6520 call to size_in_bytes. */
6521 if (TREE_CODE (elem_type) == VOID_TYPE)
6522 return offset_sid;
6523
6524 /* This might not be a constant. */
6525 tree byte_size = size_in_bytes (elem_type);
6526
6527 /* Try to get a constant by dividing, ensuring that we're in a
6528 signed representation first. */
6529 tree index
6530 = fold_binary (TRUNC_DIV_EXPR, ssizetype,
6531 fold_convert (ssizetype, offset_cst),
6532 fold_convert (ssizetype, byte_size));
6533 if (index && TREE_CODE (index) == INTEGER_CST)
6534 return get_or_create_constant_svalue (index);
6535 }
6536
6537 /* Otherwise, we don't know the array index; generate a new unknown value.
6538 TODO: do we need to capture the relationship between two unknown
6539 values (the offset and the index)? */
6540 return add_svalue (new unknown_svalue (integer_type_node));
6541 }
6542
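/* Illustrative examples for convert_byte_offset_to_array_index above
   (assuming a target where sizeof (int) == 4):
   - ptr_type "int *" with a constant byte offset of 8 yields the
     constant index 2;
   - ptr_type "void *" returns the offset unchanged (GNU pointer
     arithmetic treats the size of void as 1);
   - a non-constant offset yields a fresh unknown_svalue. */
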
6543 /* Get a region of type TYPE for PTR_SID[OFFSET_SID/sizeof (*PTR_SID)].
6544
6545 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6546 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6547 and then get a view of type TYPE on the relevant array element. */
6548
6549 region_id
6550 region_model::get_or_create_mem_ref (tree type,
6551 svalue_id ptr_sid,
6552 svalue_id offset_sid,
6553 region_model_context *ctxt)
6554 {
6555 svalue *ptr_sval = get_svalue (ptr_sid);
6556 tree ptr_type = ptr_sval->get_type ();
6557 gcc_assert (ptr_type);
6558
6559 region_id raw_rid = deref_rvalue (ptr_sid, ctxt);
6560
6561 svalue *offset_sval = get_svalue (offset_sid);
6562 tree offset_type = offset_sval->get_type ();
6563 gcc_assert (offset_type);
6564
6565 if (constant_svalue *cst_sval = offset_sval->dyn_cast_constant_svalue ())
6566 {
6567 if (zerop (cst_sval->get_constant ()))
6568 {
6569 /* Handle the zero offset case. */
6570 return get_or_create_view (raw_rid, type);
6571 }
6572
6573 /* If we're already within an array of the correct type,
6574 then we want to reuse that array, rather than starting
6575 a new view.
6576 If so, figure out our raw_rid's offset from its parent,
6577 if we can, and use that to offset OFFSET_SID, and create
6578 the element within the parent region. */
6579 region *raw_reg = get_region (raw_rid);
6580 region_id parent_rid = raw_reg->get_parent ();
6581 tree parent_type = get_region (parent_rid)->get_type ();
6582 if (parent_type
6583 && TREE_CODE (parent_type) == ARRAY_TYPE)
6584 {
6585 // TODO: check we have the correct parent type
6586 array_region *parent_array = get_region <array_region> (parent_rid);
6587 array_region::key_t key_for_raw_rid;
6588 if (parent_array->get_key_for_child_region (raw_rid,
6589 &key_for_raw_rid))
6590 {
6591 /* Convert from offset to index. */
6592 svalue_id index_sid
6593 = convert_byte_offset_to_array_index (ptr_type, offset_sid);
6594 if (tree index_cst
6595 = get_svalue (index_sid)->maybe_get_constant ())
6596 {
6597 array_region::key_t index_offset
6598 = array_region::key_from_constant (index_cst);
6599 array_region::key_t index_rel_to_parent
6600 = key_for_raw_rid + index_offset;
6601 tree index_rel_to_parent_cst
6602 = wide_int_to_tree (integer_type_node,
6603 index_rel_to_parent);
6604 svalue_id index_sid
6605 = get_or_create_constant_svalue (index_rel_to_parent_cst);
6606
6607 /* Carry on, using the parent region and adjusted index. */
6608 region_id element_rid
6609 = parent_array->get_element (this, raw_rid, index_sid,
6610 ctxt);
6611 return get_or_create_view (element_rid, type);
6612 }
6613 }
6614 }
6615 }
6616
6617 tree array_type = build_array_type (TREE_TYPE (ptr_type),
6618 integer_type_node);
6619 region_id array_view_rid = get_or_create_view (raw_rid, array_type);
6620 array_region *array_reg = get_region <array_region> (array_view_rid);
6621
6622 svalue_id index_sid
6623 = convert_byte_offset_to_array_index (ptr_type, offset_sid);
6624
6625 region_id element_rid
6626 = array_reg->get_element (this, array_view_rid, index_sid, ctxt);
6627
6628 return get_or_create_view (element_rid, type);
6629 }
6630
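/* Sketch of get_or_create_mem_ref above: for "*(int *)p" with a zero
   offset, this is simply a view of type "int" on the region that p points
   to; for a non-zero offset, an array view of type "typeof(*p)[]" is
   imposed on the pointee, the byte offset is converted to an index, and
   a view of TYPE is taken on that array element. */
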
6631 /* Get a region of type TYPE for PTR_SID + OFFSET_SID.
6632
6633 If OFFSET_SID is known to be zero, then dereference PTR_SID.
6634 Otherwise, impose a view of "typeof(*PTR_SID)[]" on *PTR_SID,
6635 and then get a view of type TYPE on the relevant array element. */
6636
6637 region_id
6638 region_model::get_or_create_pointer_plus_expr (tree type,
6639 svalue_id ptr_sid,
6640 svalue_id offset_in_bytes_sid,
6641 region_model_context *ctxt)
6642 {
6643 return get_or_create_mem_ref (type,
6644 ptr_sid,
6645 offset_in_bytes_sid,
6646 ctxt);
6647 }
6648
6649 /* Get or create a view of type TYPE of the region with id RAW_RID.
6650 Return the id of the view (or RAW_RID if it is already of that type). */
6651
6652 region_id
6653 region_model::get_or_create_view (region_id raw_rid, tree type)
6654 {
6655 region *raw_region = get_region (raw_rid);
6656
6657 gcc_assert (TYPE_P (type));
6658 if (type != raw_region->get_type ())
6659 {
6660 /* If the region already has a view of the requested type,
6661 reuse it. */
6662 region_id existing_view_rid = raw_region->get_view (type, this);
6663 if (!existing_view_rid.null_p ())
6664 return existing_view_rid;
6665
6666 /* Otherwise, make one (adding it to the region_model and
6667 to the viewed region). */
6668 region_id view_rid = add_region_for_type (raw_rid, type);
6669 raw_region->add_view (view_rid, this);
6670 // TODO: something to signify that this is a "view"
6671 return view_rid;
6672 }
6673
6674 return raw_rid;
6675 }
6676
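/* For example (illustrative), for get_or_create_view above: accessing an
   "int" region through an "unsigned char *" pointer yields a child view
   region of type "unsigned char" on the same underlying region; asking
   for the same view type again returns the existing view rather than
   creating a second one. */
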
6677 /* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6678 otherwise. */
6679
6680 tree
6681 region_model::get_fndecl_for_call (const gcall *call,
6682 region_model_context *ctxt)
6683 {
6684 tree fn_ptr = gimple_call_fn (call);
6685 if (fn_ptr == NULL_TREE)
6686 return NULL_TREE;
6687 svalue_id fn_ptr_sid = get_rvalue (fn_ptr, ctxt);
6688 svalue *fn_ptr_sval = get_svalue (fn_ptr_sid);
6689 if (region_svalue *fn_ptr_ptr = fn_ptr_sval->dyn_cast_region_svalue ())
6690 {
6691 region_id fn_rid = fn_ptr_ptr->get_pointee ();
6692 code_region *code = get_root_region ()->get_code_region (this);
6693 if (code)
6694 {
6695 tree fn_decl = code->get_tree_for_child_region (fn_rid);
6696 const cgraph_node *ultimate_node
6697 = cgraph_node::get (fn_decl)->ultimate_alias_target ();
6698 if (ultimate_node)
6699 return ultimate_node->decl;
6700 }
6701 }
6702
6703 return NULL_TREE;
6704 }
6705
6706 /* struct model_merger. */
6707
6708 /* Dump a multiline representation of this merger to PP. */
6709
6710 void
6711 model_merger::dump_to_pp (pretty_printer *pp) const
6712 {
6713 pp_string (pp, "model A:");
6714 pp_newline (pp);
6715 m_model_a->dump_to_pp (pp, false);
6716 pp_newline (pp);
6717
6718 pp_string (pp, "model B:");
6719 pp_newline (pp);
6720 m_model_b->dump_to_pp (pp, false);
6721 pp_newline (pp);
6722
6723 pp_string (pp, "merged model:");
6724 pp_newline (pp);
6725 m_merged_model->dump_to_pp (pp, false);
6726 pp_newline (pp);
6727
6728 pp_string (pp, "region map: model A to merged model:");
6729 pp_newline (pp);
6730 m_map_regions_from_a_to_m.dump_to_pp (pp);
6731 pp_newline (pp);
6732
6733 pp_string (pp, "region map: model B to merged model:");
6734 pp_newline (pp);
6735 m_map_regions_from_b_to_m.dump_to_pp (pp);
6736 pp_newline (pp);
6737
6738 m_sid_mapping->dump_to_pp (pp);
6739 }
6740
6741 /* Dump a multiline representation of this merger to FILE. */
6742
6743 void
6744 model_merger::dump (FILE *fp) const
6745 {
6746 pretty_printer pp;
6747 pp_format_decoder (&pp) = default_tree_printer;
6748 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6749 pp.buffer->stream = fp;
6750 dump_to_pp (&pp);
6751 pp_flush (&pp);
6752 }
6753
6754 /* Dump a multiline representation of this merger to stderr. */
6755
6756 DEBUG_FUNCTION void
6757 model_merger::dump () const
6758 {
6759 dump (stderr);
6760 }
6761
6762 /* Attempt to merge the svalues of SID_A and SID_B (from their
6763 respective models), writing the id of the resulting svalue
6764 into *MERGED_SID.
6765 Return true if the merger is possible, false otherwise. */
6766
6767 bool
6768 model_merger::can_merge_values_p (svalue_id sid_a,
6769 svalue_id sid_b,
6770 svalue_id *merged_sid)
6771 {
6772 gcc_assert (merged_sid);
6773 svalue *sval_a = m_model_a->get_svalue (sid_a);
6774 svalue *sval_b = m_model_b->get_svalue (sid_b);
6775
6776 /* If both are NULL, then the "values" are trivially mergeable. */
6777 if (!sval_a && !sval_b)
6778 return true;
6779
6780 /* If one is NULL and the other non-NULL, then the "values"
6781 are not mergeable. */
6782 if (!(sval_a && sval_b))
6783 return false;
6784
6785 /* Have they both already been mapped to the same new svalue_id?
6786 If so, use it. */
6787 svalue_id sid_a_in_m
6788 = m_sid_mapping->m_map_from_a_to_m.get_dst_for_src (sid_a);
6789 svalue_id sid_b_in_m
6790 = m_sid_mapping->m_map_from_b_to_m.get_dst_for_src (sid_b);
6791 if (!sid_a_in_m.null_p ()
6792 && !sid_b_in_m.null_p ()
6793 && sid_a_in_m == sid_b_in_m)
6794 {
6795 *merged_sid = sid_a_in_m;
6796 return true;
6797 }
6798
6799 tree type = sval_a->get_type ();
6800 if (type == NULL_TREE)
6801 type = sval_b->get_type ();
6802
6803 /* If the values have different kinds, or are both unknown,
6804 then merge as "unknown". */
6805 if (sval_a->get_kind () != sval_b->get_kind ()
6806 || sval_a->get_kind () == SK_UNKNOWN)
6807 {
6808 svalue *merged_sval = new unknown_svalue (type);
6809 *merged_sid = m_merged_model->add_svalue (merged_sval);
6810 record_svalues (sid_a, sid_b, *merged_sid);
6811 return true;
6812 }
6813
6814 gcc_assert (sval_a->get_kind () == sval_b->get_kind ());
6815
6816 switch (sval_a->get_kind ())
6817 {
6818 default:
6819 case SK_UNKNOWN: /* SK_UNKNOWN handled above. */
6820 gcc_unreachable ();
6821
6822 case SK_REGION:
6823 {
6824 /* If we have two region pointers, then we can merge (possibly to
6825 "unknown"). */
6826 const region_svalue &region_sval_a = *as_a <region_svalue *> (sval_a);
6827 const region_svalue &region_sval_b = *as_a <region_svalue *> (sval_b);
6828 region_svalue::merge_values (region_sval_a, region_sval_b,
6829 merged_sid, type,
6830 this);
6831 record_svalues (sid_a, sid_b, *merged_sid);
6832 return true;
6833 }
6834 break;
6835 case SK_CONSTANT:
6836 {
6837 /* If we have two constants, then we can merge. */
6838 const constant_svalue &cst_sval_a = *as_a <constant_svalue *> (sval_a);
6839 const constant_svalue &cst_sval_b = *as_a <constant_svalue *> (sval_b);
6840 constant_svalue::merge_values (cst_sval_a, cst_sval_b,
6841 merged_sid, this);
6842 record_svalues (sid_a, sid_b, *merged_sid);
6843 return true;
6844 }
6845 break;
6846
6847 case SK_POISONED:
6848 case SK_SETJMP:
6849 return false;
6850 }
6851 }
6852
6853 /* Record that A_RID in model A and B_RID in model B
6854 correspond to MERGED_RID in the merged model, so
6855 that pointers can be accurately merged. */
6856
6857 void
6858 model_merger::record_regions (region_id a_rid,
6859 region_id b_rid,
6860 region_id merged_rid)
6861 {
6862 m_map_regions_from_a_to_m.put (a_rid, merged_rid);
6863 m_map_regions_from_b_to_m.put (b_rid, merged_rid);
6864 }
6865
6866 /* Record that A_SID in model A and B_SID in model B
6867 correspond to MERGED_SID in the merged model. */
6868
6869 void
6870 model_merger::record_svalues (svalue_id a_sid,
6871 svalue_id b_sid,
6872 svalue_id merged_sid)
6873 {
6874 gcc_assert (m_sid_mapping);
6875 m_sid_mapping->m_map_from_a_to_m.put (a_sid, merged_sid);
6876 m_sid_mapping->m_map_from_b_to_m.put (b_sid, merged_sid);
6877 }
6878
6879 /* struct svalue_id_merger_mapping. */
6880
6881 /* svalue_id_merger_mapping's ctor. */
6882
6883 svalue_id_merger_mapping::svalue_id_merger_mapping (const region_model &a,
6884 const region_model &b)
6885 : m_map_from_a_to_m (a.get_num_svalues ()),
6886 m_map_from_b_to_m (b.get_num_svalues ())
6887 {
6888 }
6889
6890 /* Dump a multiline representation of this to PP. */
6891
6892 void
6893 svalue_id_merger_mapping::dump_to_pp (pretty_printer *pp) const
6894 {
6895 pp_string (pp, "svalue_id map: model A to merged model:");
6896 pp_newline (pp);
6897 m_map_from_a_to_m.dump_to_pp (pp);
6898 pp_newline (pp);
6899
6900 pp_string (pp, "svalue_id map: model B to merged model:");
6901 pp_newline (pp);
6902 m_map_from_b_to_m.dump_to_pp (pp);
6903 pp_newline (pp);
6904 }
6905
6906 /* Dump a multiline representation of this to FILE. */
6907
6908 void
6909 svalue_id_merger_mapping::dump (FILE *fp) const
6910 {
6911 pretty_printer pp;
6912 pp_format_decoder (&pp) = default_tree_printer;
6913 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6914 pp.buffer->stream = fp;
6915 dump_to_pp (&pp);
6916 pp_flush (&pp);
6917 }
6918
6919 /* Dump a multiline representation of this to stderr. */
6920
6921 DEBUG_FUNCTION void
6922 svalue_id_merger_mapping::dump () const
6923 {
6924 dump (stderr);
6925 }
6926
6927 /* struct canonicalization. */
6928
6929 /* canonicalization's ctor. */
6930
6931 canonicalization::canonicalization (const region_model &model)
6932 : m_model (model),
6933 m_rid_map (model.get_num_regions ()),
6934 m_sid_map (model.get_num_svalues ()),
6935 m_next_rid_int (0),
6936 m_next_sid_int (0)
6937 {
6938 }
6939
6940 /* If we've not seen RID yet, assign it a canonicalized region_id,
6941 and walk the region's svalue and then the region. */
6942
6943 void
6944 canonicalization::walk_rid (region_id rid)
6945 {
6946 /* Stop if we've already seen RID. */
6947 if (!m_rid_map.get_dst_for_src (rid).null_p ())
6948 return;
6949
6950 region *region = m_model.get_region (rid);
6951 if (region)
6952 {
6953 m_rid_map.put (rid, region_id::from_int (m_next_rid_int++));
6954 walk_sid (region->get_value_direct ());
6955 region->walk_for_canonicalization (this);
6956 }
6957 }
6958
6959 /* If we've not seen SID yet, assign it a canonicalized svalue_id,
6960 and walk the svalue (and potentially regions e.g. for ptr values). */
6961
6962 void
6963 canonicalization::walk_sid (svalue_id sid)
6964 {
6965 /* Stop if we've already seen SID. */
6966 if (!m_sid_map.get_dst_for_src (sid).null_p ())
6967 return;
6968
6969 svalue *sval = m_model.get_svalue (sid);
6970 if (sval)
6971 {
6972 m_sid_map.put (sid, svalue_id::from_int (m_next_sid_int++));
6973 /* Potentially walk regions e.g. for ptrs. */
6974 sval->walk_for_canonicalization (this);
6975 }
6976 }
6977
6978 /* Dump a multiline representation of this to PP. */
6979
6980 void
6981 canonicalization::dump_to_pp (pretty_printer *pp) const
6982 {
6983 pp_string (pp, "region_id map:");
6984 pp_newline (pp);
6985 m_rid_map.dump_to_pp (pp);
6986 pp_newline (pp);
6987
6988 pp_string (pp, "svalue_id map:");
6989 pp_newline (pp);
6990 m_sid_map.dump_to_pp (pp);
6991 pp_newline (pp);
6992 }
6993
6994 /* Dump a multiline representation of this to FILE. */
6995
6996 void
6997 canonicalization::dump (FILE *fp) const
6998 {
6999 pretty_printer pp;
7000 pp_format_decoder (&pp) = default_tree_printer;
7001 pp_show_color (&pp) = pp_show_color (global_dc->printer);
7002 pp.buffer->stream = fp;
7003 dump_to_pp (&pp);
7004 pp_flush (&pp);
7005 }
7006
7007 /* Dump a multiline representation of this to stderr. */
7008
7009 DEBUG_FUNCTION void
7010 canonicalization::dump () const
7011 {
7012 dump (stderr);
7013 }
7014
7015 } // namespace ana
7016
7017 /* Update HSTATE with a hash of SID. */
7018
7019 void
7020 inchash::add (svalue_id sid, inchash::hash &hstate)
7021 {
7022 hstate.add_int (sid.as_int ());
7023 }
7024
7025 /* Update HSTATE with a hash of RID. */
7026
7027 void
7028 inchash::add (region_id rid, inchash::hash &hstate)
7029 {
7030 hstate.add_int (rid.as_int ());
7031 }
7032
7033 /* Dump RMODEL fully to stderr (i.e. without summarization). */
7034
7035 DEBUG_FUNCTION void
7036 debug (const region_model &rmodel)
7037 {
7038 rmodel.dump (false);
7039 }
7040
7041 namespace ana {
7042
7043 #if CHECKING_P
7044
7045 namespace selftest {
7046
7047 /* Build a constant tree of the given type from STR. */
7048
7049 static tree
7050 build_real_cst_from_string (tree type, const char *str)
7051 {
7052 REAL_VALUE_TYPE real;
7053 real_from_string (&real, str);
7054 return build_real (type, real);
7055 }
7056
7057 /* Append various "interesting" constants to OUT (e.g. NaN). */
7058
7059 static void
7060 append_interesting_constants (auto_vec<tree> *out)
7061 {
7062 out->safe_push (build_int_cst (integer_type_node, 0));
7063 out->safe_push (build_int_cst (integer_type_node, 42));
7064 out->safe_push (build_int_cst (unsigned_type_node, 0));
7065 out->safe_push (build_int_cst (unsigned_type_node, 42));
7066 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
7067 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
7068 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
7069 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
7070 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
7071 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
7072 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
7073 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
7074 }
7075
7076 /* Verify that tree_cmp is a well-behaved comparator for qsort, even
7077 if the underlying constants aren't comparable. */
7078
7079 static void
7080 test_tree_cmp_on_constants ()
7081 {
7082 auto_vec<tree> csts;
7083 append_interesting_constants (&csts);
7084
7085 /* Try sorting every triple. */
7086 const unsigned num = csts.length ();
7087 for (unsigned i = 0; i < num; i++)
7088 for (unsigned j = 0; j < num; j++)
7089 for (unsigned k = 0; k < num; k++)
7090 {
7091 auto_vec<tree> v (3);
7092 v.quick_push (csts[i]);
7093 v.quick_push (csts[j]);
7094 v.quick_push (csts[k]);
7095 v.qsort (tree_cmp);
7096 }
7097 }
7098
7099 /* Implementation detail of the ASSERT_CONDITION_* macros. */
7100
7101 void
7102 assert_condition (const location &loc,
7103 region_model &model,
7104 tree lhs, tree_code op, tree rhs,
7105 tristate expected)
7106 {
7107 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
7108 ASSERT_EQ_AT (loc, actual, expected);
7109 }
7110
7111 /* Implementation detail of ASSERT_DUMP_EQ. */
7112
7113 static void
7114 assert_dump_eq (const location &loc,
7115 const region_model &model,
7116 bool summarize,
7117 const char *expected)
7118 {
7119 auto_fix_quotes sentinel;
7120 pretty_printer pp;
7121 pp_format_decoder (&pp) = default_tree_printer;
7122 model.dump_to_pp (&pp, summarize);
7123 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
7124 }
7125
7126 /* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7127
7128 #define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7129 SELFTEST_BEGIN_STMT \
7130 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7131 SELFTEST_END_STMT
7132
7133 /* Smoketest for region_model::dump_to_pp. */
7134
7135 static void
7136 test_dump ()
7137 {
7138 region_model model;
7139 model.get_root_region ()->ensure_stack_region (&model);
7140 model.get_root_region ()->ensure_globals_region (&model);
7141 model.get_root_region ()->ensure_heap_region (&model);
7142
7143 ASSERT_DUMP_EQ (model, false,
7144 "r0: {kind: `root', parent: null, sval: null}\n"
7145 "|-stack: r1: {kind: `stack', parent: r0, sval: sv0}\n"
7146 "| |: sval: sv0: {poisoned: uninit}\n"
7147 "|-globals: r2: {kind: `globals', parent: r0, sval: null, map: {}}\n"
7148 "`-heap: r3: {kind: `heap', parent: r0, sval: sv1}\n"
7149 " |: sval: sv1: {poisoned: uninit}\n"
7150 "svalues:\n"
7151 " sv0: {poisoned: uninit}\n"
7152 " sv1: {poisoned: uninit}\n"
7153 "constraint manager:\n"
7154 " equiv classes:\n"
7155 " constraints:\n");
7156 ASSERT_DUMP_EQ (model, true, "");
7157 }
7158
7159 /* Verify that calling region_model::get_rvalue repeatedly on the same
7160 tree constant retrieves the same svalue_id. */
7161
7162 static void
7163 test_unique_constants ()
7164 {
7165 tree int_0 = build_int_cst (integer_type_node, 0);
7166 tree int_42 = build_int_cst (integer_type_node, 42);
7167
7168 test_region_model_context ctxt;
7169 region_model model;
7170 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
7171 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
7172 model.get_rvalue (int_42, &ctxt));
7173 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
7174 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
7175 }
7176
7177 /* Check that operator== and hashing work as expected for the
7178 various svalue subclasses. */
7179
7180 static void
7181 test_svalue_equality ()
7182 {
7183 tree int_42 = build_int_cst (integer_type_node, 42);
7184 tree int_0 = build_int_cst (integer_type_node, 0);
7185
7186 /* Create pairs of instances of the various subclasses of svalue,
7187 testing for hash and equality between (this, this) and
7188 (this, other of same subclass). */
7189 svalue *ptr_to_r0
7190 = new region_svalue (ptr_type_node, region_id::from_int (0));
7191 svalue *ptr_to_r1
7192 = new region_svalue (ptr_type_node, region_id::from_int (1));
7193
7194 ASSERT_EQ (ptr_to_r0->hash (), ptr_to_r0->hash ());
7195 ASSERT_EQ (*ptr_to_r0, *ptr_to_r0);
7196
7197 ASSERT_NE (ptr_to_r0->hash (), ptr_to_r1->hash ());
7198 ASSERT_NE (*ptr_to_r0, *ptr_to_r1);
7199
7200 svalue *cst_int_42 = new constant_svalue (int_42);
7201 svalue *cst_int_0 = new constant_svalue (int_0);
7202
7203 ASSERT_EQ (cst_int_42->hash (), cst_int_42->hash ());
7204 ASSERT_EQ (*cst_int_42, *cst_int_42);
7205
7206 ASSERT_NE (cst_int_42->hash (), cst_int_0->hash ());
7207 ASSERT_NE (*cst_int_42, *cst_int_0);
7208
7209 svalue *uninit = new poisoned_svalue (POISON_KIND_UNINIT, NULL_TREE);
7210 svalue *freed = new poisoned_svalue (POISON_KIND_FREED, NULL_TREE);
7211
7212 ASSERT_EQ (uninit->hash (), uninit->hash ());
7213 ASSERT_EQ (*uninit, *uninit);
7214
7215 ASSERT_NE (uninit->hash (), freed->hash ());
7216 ASSERT_NE (*uninit, *freed);
7217
7218 svalue *unknown_0 = new unknown_svalue (ptr_type_node);
7219 svalue *unknown_1 = new unknown_svalue (ptr_type_node);
7220 ASSERT_EQ (unknown_0->hash (), unknown_0->hash ());
7221 ASSERT_EQ (*unknown_0, *unknown_0);
7222 ASSERT_EQ (*unknown_1, *unknown_1);
7223
7224 /* Comparisons between different kinds of svalue. */
7225 ASSERT_NE (*ptr_to_r0, *cst_int_42);
7226 ASSERT_NE (*ptr_to_r0, *uninit);
7227 ASSERT_NE (*ptr_to_r0, *unknown_0);
7228 ASSERT_NE (*cst_int_42, *ptr_to_r0);
7229 ASSERT_NE (*cst_int_42, *uninit);
7230 ASSERT_NE (*cst_int_42, *unknown_0);
7231 ASSERT_NE (*uninit, *ptr_to_r0);
7232 ASSERT_NE (*uninit, *cst_int_42);
7233 ASSERT_NE (*uninit, *unknown_0);
7234 ASSERT_NE (*unknown_0, *ptr_to_r0);
7235 ASSERT_NE (*unknown_0, *cst_int_42);
7236 ASSERT_NE (*unknown_0, *uninit);
7237
7238 delete ptr_to_r0;
7239 delete ptr_to_r1;
7240 delete cst_int_42;
7241 delete cst_int_0;
7242 delete uninit;
7243 delete freed;
7244 delete unknown_0;
7245 delete unknown_1;
7246 }
7247
7248 /* Check that operator== and hashing work as expected for the
7249 various region subclasses. */
7250
7251 static void
7252 test_region_equality ()
7253 {
7254 region *r0
7255 = new primitive_region (region_id::from_int (3), integer_type_node);
7256 region *r1
7257 = new primitive_region (region_id::from_int (4), integer_type_node);
7258
7259 ASSERT_EQ (*r0, *r0);
7260 ASSERT_EQ (r0->hash (), r0->hash ());
7261 ASSERT_NE (*r0, *r1);
7262 ASSERT_NE (r0->hash (), r1->hash ());
7263
7264 delete r0;
7265 delete r1;
7266
7267 // TODO: test coverage for the map within a map_region
7268 }
7269
7270 /* A subclass of purge_criteria for selftests: purge all svalue_id instances. */
7271
7272 class purge_all_svalue_ids : public purge_criteria
7273 {
7274 public:
7275 bool should_purge_p (svalue_id) const FINAL OVERRIDE
7276 {
7277 return true;
7278 }
7279 };
7280
7281 /* A subclass of purge_criteria: purge a specific svalue_id. */
7282
7283 class purge_one_svalue_id : public purge_criteria
7284 {
7285 public:
7286 purge_one_svalue_id (svalue_id victim) : m_victim (victim) {}
7287
7288 purge_one_svalue_id (region_model model, tree expr)
7289 : m_victim (model.get_rvalue (expr, NULL)) {}
7290
7291 bool should_purge_p (svalue_id sid) const FINAL OVERRIDE
7292 {
7293 return sid == m_victim;
7294 }
7295
7296 private:
7297 svalue_id m_victim;
7298 };
7299
7300 /* Check that constraint_manager::purge works for individual svalue_ids. */
7301
7302 static void
7303 test_purging_by_criteria ()
7304 {
7305 tree int_42 = build_int_cst (integer_type_node, 42);
7306 tree int_0 = build_int_cst (integer_type_node, 0);
7307
7308 tree x = build_global_decl ("x", integer_type_node);
7309 tree y = build_global_decl ("y", integer_type_node);
7310
7311 {
7312 region_model model0;
7313 region_model model1;
7314
7315 ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, y);
7316 ASSERT_NE (model0, model1);
7317
7318 purge_stats stats_for_px;
7319 purge_one_svalue_id px (model1, x);
7320 model1.get_constraints ()->purge (px, &stats_for_px);
7321 ASSERT_EQ (stats_for_px.m_num_equiv_classes, 0);
7322
7323 purge_stats stats_for_py;
7324 purge_one_svalue_id py (model1.get_rvalue (y, NULL));
7325 model1.get_constraints ()->purge (py, &stats_for_py);
7326 ASSERT_EQ (stats_for_py.m_num_equiv_classes, 1);
7327
7328 ASSERT_EQ (*model0.get_constraints (), *model1.get_constraints ());
7329 }
7330
7331 {
7332 region_model model0;
7333 region_model model1;
7334
7335 ADD_SAT_CONSTRAINT (model1, x, EQ_EXPR, int_42);
7336 ASSERT_NE (model0, model1);
7337 ASSERT_CONDITION_TRUE (model1, x, EQ_EXPR, int_42);
7338
7339 purge_stats stats;
7340 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);
7341
7342 ASSERT_CONDITION_UNKNOWN (model1, x, EQ_EXPR, int_42);
7343 }
7344
7345 {
7346 region_model model0;
7347 region_model model1;
7348
7349 ADD_SAT_CONSTRAINT (model1, x, GE_EXPR, int_0);
7350 ADD_SAT_CONSTRAINT (model1, x, LE_EXPR, int_42);
7351 ASSERT_NE (model0, model1);
7352
7353 ASSERT_CONDITION_TRUE (model1, x, GE_EXPR, int_0);
7354 ASSERT_CONDITION_TRUE (model1, x, LE_EXPR, int_42);
7355
7356 purge_stats stats;
7357 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);
7358
7359 ASSERT_CONDITION_UNKNOWN (model1, x, GE_EXPR, int_0);
7360 ASSERT_CONDITION_UNKNOWN (model1, x, LE_EXPR, int_42);
7361 }
7362
7363 {
7364 region_model model0;
7365 region_model model1;
7366
7367 ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42);
7368 ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0);
7369 ASSERT_NE (model0, model1);
7370 ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42);
7371 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);
7372
7373 purge_stats stats;
7374 model1.get_constraints ()->purge (purge_one_svalue_id (model1, x), &stats);
7375 ASSERT_NE (model0, model1);
7376
7377 ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42);
7378 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);
7379 }
7380
7381 {
7382 region_model model0;
7383 region_model model1;
7384
7385 ADD_SAT_CONSTRAINT (model1, x, NE_EXPR, int_42);
7386 ADD_SAT_CONSTRAINT (model1, y, NE_EXPR, int_0);
7387 ASSERT_NE (model0, model1);
7388 ASSERT_CONDITION_TRUE (model1, x, NE_EXPR, int_42);
7389 ASSERT_CONDITION_TRUE (model1, y, NE_EXPR, int_0);
7390
7391 purge_stats stats;
7392 model1.get_constraints ()->purge (purge_all_svalue_ids (), &stats);
7393 ASSERT_CONDITION_UNKNOWN (model1, x, NE_EXPR, int_42);
7394 ASSERT_CONDITION_UNKNOWN (model1, y, NE_EXPR, int_0);
7395 }
7396
7397 }
7398
7399 /* Test that region_model::purge_unused_svalues works as expected. */
7400
7401 static void
7402 test_purge_unused_svalues ()
7403 {
7404 tree int_42 = build_int_cst (integer_type_node, 42);
7405 tree int_0 = build_int_cst (integer_type_node, 0);
7406 tree x = build_global_decl ("x", integer_type_node);
7407 tree y = build_global_decl ("y", integer_type_node);
7408
7409 test_region_model_context ctxt;
7410 region_model model;
7411 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
7412 &ctxt);
7413 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
7414 &ctxt);
7415 model.set_to_new_unknown_value (model.get_lvalue (x, &ctxt), TREE_TYPE (x),
7416 &ctxt);
7417 model.add_constraint (x, NE_EXPR, int_42, &ctxt);
7418
7419 model.set_value (model.get_lvalue (x, &ctxt),
7420 model.get_rvalue (int_42, &ctxt),
7421 &ctxt);
7422 model.add_constraint (y, GT_EXPR, int_0, &ctxt);
7423
7424 /* The redundant unknown values should have been purged. */
7425 purge_stats purged;
7426 model.purge_unused_svalues (&purged, NULL);
7427 ASSERT_EQ (purged.m_num_svalues, 3);
7428
7429 /* and the redundant constraint on an old, unknown value for x should
7430 have been purged. */
7431 ASSERT_EQ (purged.m_num_equiv_classes, 1);
7432 ASSERT_EQ (purged.m_num_constraints, 1);
7433 ASSERT_EQ (model.get_constraints ()->m_constraints.length (), 2);
7434
7435 /* ...but we should still have x == 42. */
7436 ASSERT_EQ (model.eval_condition (x, EQ_EXPR, int_42, &ctxt),
7437 tristate::TS_TRUE);
7438
7439 /* ...and we should still have the constraint on y. */
7440 ASSERT_EQ (model.eval_condition (y, GT_EXPR, int_0, &ctxt),
7441 tristate::TS_TRUE);
7442
7443 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
7444 }
7445
7446 /* Verify that simple assignments work as expected. */
7447
7448 static void
7449 test_assignment ()
7450 {
7451 tree int_0 = build_int_cst (integer_type_node, 0);
7452 tree x = build_global_decl ("x", integer_type_node);
7453 tree y = build_global_decl ("y", integer_type_node);
7454
7455 /* "x == 0", then use of y, then "y = 0;". */
7456 region_model model;
7457 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
7458 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
7459 model.set_value (model.get_lvalue (y, NULL),
7460 model.get_rvalue (int_0, NULL),
7461 NULL);
7462 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
7463 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
7464
7465 ASSERT_DUMP_EQ (model, true, "y: 0, {x}: unknown, x == y");
7466 }
7467
7468 /* Verify the details of pushing and popping stack frames. */
7469
7470 static void
7471 test_stack_frames ()
7472 {
7473 tree int_42 = build_int_cst (integer_type_node, 42);
7474 tree int_10 = build_int_cst (integer_type_node, 10);
7475 tree int_5 = build_int_cst (integer_type_node, 5);
7476 tree int_0 = build_int_cst (integer_type_node, 0);
7477
7478 auto_vec <tree> param_types;
7479 tree parent_fndecl = make_fndecl (integer_type_node,
7480 "parent_fn",
7481 param_types);
7482 allocate_struct_function (parent_fndecl, true);
7483
7484 tree child_fndecl = make_fndecl (integer_type_node,
7485 "child_fn",
7486 param_types);
7487 allocate_struct_function (child_fndecl, true);
7488
7489 /* "a" and "b" in the parent frame. */
7490 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7491 get_identifier ("a"),
7492 integer_type_node);
7493 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7494 get_identifier ("b"),
7495 integer_type_node);
7496 /* "x" and "y" in a child frame. */
7497 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7498 get_identifier ("x"),
7499 integer_type_node);
7500 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7501 get_identifier ("y"),
7502 integer_type_node);
7503
7504 /* "p" global. */
7505 tree p = build_global_decl ("p", ptr_type_node);
7506
7507 /* "q" global. */
7508 tree q = build_global_decl ("q", ptr_type_node);
7509
7510 test_region_model_context ctxt;
7511 region_model model;
7512
7513 /* Push stack frame for "parent_fn". */
7514 region_id parent_frame_rid
7515 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl), NULL, &ctxt);
7516 ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid);
7517 region_id a_in_parent_rid = model.get_lvalue (a, &ctxt);
7518 model.set_value (a_in_parent_rid, model.get_rvalue (int_42, &ctxt), &ctxt);
7519 model.set_to_new_unknown_value (model.get_lvalue (b, &ctxt),
7520 integer_type_node, &ctxt);
7521 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
7522 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7523 tristate (tristate::TS_TRUE));
7524
7525 /* Push stack frame for "child_fn". */
7526 region_id child_frame_rid
7527 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
7528 ASSERT_EQ (model.get_current_frame_id (), child_frame_rid);
7529 region_id x_in_child_rid = model.get_lvalue (x, &ctxt);
7530 model.set_value (x_in_child_rid, model.get_rvalue (int_0, &ctxt), &ctxt);
7531 model.set_to_new_unknown_value (model.get_lvalue (y, &ctxt),
7532 integer_type_node, &ctxt);
7533 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
7534 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
7535 tristate (tristate::TS_TRUE));
7536
7537 /* Point a global pointer at a local in the child frame: p = &x. */
7538 region_id p_in_globals_rid = model.get_lvalue (p, &ctxt);
7539 model.set_value (p_in_globals_rid,
7540 model.get_or_create_ptr_svalue (ptr_type_node,
7541 x_in_child_rid),
7542 &ctxt);
7543
7544 /* Point another global pointer at p: q = &p. */
7545 region_id q_in_globals_rid = model.get_lvalue (q, &ctxt);
7546 model.set_value (q_in_globals_rid,
7547 model.get_or_create_ptr_svalue (ptr_type_node,
7548 p_in_globals_rid),
7549 &ctxt);
7550
7551 /* Test get_descendents. */
7552 region_id_set descendents (&model);
7553 model.get_descendents (child_frame_rid, &descendents, region_id::null ());
7554 ASSERT_TRUE (descendents.region_p (child_frame_rid));
7555 ASSERT_TRUE (descendents.region_p (x_in_child_rid));
7556 ASSERT_FALSE (descendents.region_p (a_in_parent_rid));
7557 ASSERT_EQ (descendents.num_regions (), 3);
7558 #if 0
7559 auto_vec<region_id> test_vec;
7560 for (region_id_set::iterator_t iter = descendents.begin ();
7561 iter != descendents.end ();
7562 ++iter)
7563 test_vec.safe_push (*iter);
7564 gcc_unreachable (); // TODO
7565 //ASSERT_EQ ();
7566 #endif
7567
7568 ASSERT_DUMP_EQ (model, true,
7569 "x: 0, {y}: unknown, p: &x, q: &p, b < 10, y != 5");
7570
7571 /* Pop the "child_fn" frame from the stack. */
7572 purge_stats purged;
7573 model.pop_frame (true, &purged, &ctxt);
7574
7575 /* We should have purged the unknown values for x and y. */
7576 ASSERT_EQ (purged.m_num_svalues, 2);
7577
7578 /* We should have purged the frame region and the regions for x and y. */
7579 ASSERT_EQ (purged.m_num_regions, 3);
7580
7581 /* We should have purged the constraint on y. */
7582 ASSERT_EQ (purged.m_num_equiv_classes, 1);
7583 ASSERT_EQ (purged.m_num_constraints, 1);
7584
7585 /* Verify that p (which was pointing at the local "x" in the popped
7586 frame) has been poisoned. */
7587 svalue *new_p_sval = model.get_svalue (model.get_rvalue (p, &ctxt));
7588 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
7589 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
7590 POISON_KIND_POPPED_STACK);
7591
7592 /* Verify that q still points to p, in spite of the region
7593 renumbering. */
7594 svalue *new_q_sval = model.get_svalue (model.get_rvalue (q, &ctxt));
7595 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
7596 ASSERT_EQ (new_q_sval->dyn_cast_region_svalue ()->get_pointee (),
7597 model.get_lvalue (p, &ctxt));
7598
7599 /* Verify that top of stack has been updated. */
7600 ASSERT_EQ (model.get_current_frame_id (), parent_frame_rid);
7601
7602 /* Verify locals in parent frame. */
7603 /* Verify "a" still has its value. */
7604 svalue *new_a_sval = model.get_svalue (model.get_rvalue (a, &ctxt));
7605 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
7606 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
7607 int_42);
7608 /* Verify "b" still has its constraint. */
7609 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
7610 tristate (tristate::TS_TRUE));
7611 }
7612
7613 /* Verify that get_representative_path_var works as expected: that
7614 we can map from region ids to parms and back within a recursive call
7615 stack. */
7616
7617 static void
7618 test_get_representative_path_var ()
7619 {
7620 auto_vec <tree> param_types;
7621 tree fndecl = make_fndecl (integer_type_node,
7622 "factorial",
7623 param_types);
7624 allocate_struct_function (fndecl, true);
7625
7626 /* Parm "n". */
7627 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7628 get_identifier ("n"),
7629 integer_type_node);
7630
7631 region_model model;
7632
7633 /* Push 5 stack frames for "factorial", each with a param.  */
7634 auto_vec<region_id> parm_rids;
7635 auto_vec<svalue_id> parm_sids;
7636 for (int depth = 0; depth < 5; depth++)
7637 {
7638 region_id frame_rid
7639 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, NULL);
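      /* Look up the lvalue for "n" within the frame at this depth.  */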
7640 region_id rid_n = model.get_lvalue (path_var (n, depth), NULL);
7641 parm_rids.safe_push (rid_n);
7642
7643 ASSERT_EQ (model.get_region (rid_n)->get_parent (), frame_rid);
7644
7645 svalue_id sid_n
7646 = model.set_to_new_unknown_value (rid_n, integer_type_node, NULL);
7647 parm_sids.safe_push (sid_n);
7648 }
7649
7650 /* Verify that we can recognize that the regions are the parms,
7651 at every depth. */
7652 for (int depth = 0; depth < 5; depth++)
7653 {
7654 ASSERT_EQ (model.get_representative_path_var (parm_rids[depth]),
7655 path_var (n, depth));
7656 /* ...and that we can look up lvalues for locals for all frames,
7657 not just the top. */
7658 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
7659 parm_rids[depth]);
7660 /* ...and that we can locate the svalues. */
7661 auto_vec<path_var> pvs;
7662 model.get_path_vars_for_svalue (parm_sids[depth], &pvs);
7663 ASSERT_EQ (pvs.length (), 1);
7664 ASSERT_EQ (pvs[0], path_var (n, depth));
7665 }
7666 }
7667
7668 /* Verify that the core regions within a region_model are in a consistent
7669 order after canonicalization. */
7670
7671 static void
7672 test_canonicalization_1 ()
7673 {
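      /* Create the stack and globals regions in opposite orders within the
         two models.  */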
7674 region_model model0;
7675 model0.get_root_region ()->ensure_stack_region (&model0);
7676 model0.get_root_region ()->ensure_globals_region (&model0);
7677
7678 region_model model1;
7679 model1.get_root_region ()->ensure_globals_region (&model1);
7680 model1.get_root_region ()->ensure_stack_region (&model1);
7681
7682 model0.canonicalize (NULL);
7683 model1.canonicalize (NULL);
7684 ASSERT_EQ (model0, model1);
7685 }
7686
7687 /* Verify that region models for
7688 x = 42; y = 113;
7689 and
7690 y = 113; x = 42;
7691 are equal after canonicalization. */
7692
7693 static void
7694 test_canonicalization_2 ()
7695 {
7696 tree int_42 = build_int_cst (integer_type_node, 42);
7697 tree int_113 = build_int_cst (integer_type_node, 113);
7698 tree x = build_global_decl ("x", integer_type_node);
7699 tree y = build_global_decl ("y", integer_type_node);
7700
7701 region_model model0;
7702 model0.set_value (model0.get_lvalue (x, NULL),
7703 model0.get_rvalue (int_42, NULL),
7704 NULL);
7705 model0.set_value (model0.get_lvalue (y, NULL),
7706 model0.get_rvalue (int_113, NULL),
7707 NULL);
7708
7709 region_model model1;
7710 model1.set_value (model1.get_lvalue (y, NULL),
7711 model1.get_rvalue (int_113, NULL),
7712 NULL);
7713 model1.set_value (model1.get_lvalue (x, NULL),
7714 model1.get_rvalue (int_42, NULL),
7715 NULL);
7716
7717 model0.canonicalize (NULL);
7718 model1.canonicalize (NULL);
7719 ASSERT_EQ (model0, model1);
7720 }
7721
7722 /* Verify that constraints for
7723 x > 3 && y > 42
7724 and
7725 y > 42 && x > 3
7726 are equal after canonicalization. */
7727
7728 static void
7729 test_canonicalization_3 ()
7730 {
7731 tree int_3 = build_int_cst (integer_type_node, 3);
7732 tree int_42 = build_int_cst (integer_type_node, 42);
7733 tree x = build_global_decl ("x", integer_type_node);
7734 tree y = build_global_decl ("y", integer_type_node);
7735
7736 region_model model0;
7737 model0.add_constraint (x, GT_EXPR, int_3, NULL);
7738 model0.add_constraint (y, GT_EXPR, int_42, NULL);
7739
7740 region_model model1;
7741 model1.add_constraint (y, GT_EXPR, int_42, NULL);
7742 model1.add_constraint (x, GT_EXPR, int_3, NULL);
7743
7744 model0.canonicalize (NULL);
7745 model1.canonicalize (NULL);
7746 ASSERT_EQ (model0, model1);
7747 }
7748
7749 /* Verify that we can canonicalize a model containing NaN and other real
7750 constants. */
7751
7752 static void
7753 test_canonicalization_4 ()
7754 {
7755 auto_vec<tree> csts;
7756 append_interesting_constants (&csts);
7757
7758 region_model model;
7759
7760 unsigned i;
7761 tree cst;
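      /* Ensure the model has an svalue for each interesting constant.  */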
7762 FOR_EACH_VEC_ELT (csts, i, cst)
7763 model.get_rvalue (cst, NULL);
7764
7765 model.canonicalize (NULL);
7766 }
7767
7768 /* Assert that if we have two region_model instances
7769    with values VAL_A and VAL_B for EXPR, then they are
7770    mergeable.  Write the merged model to *OUT_MERGED_MODEL,
7771 and the merged svalue ptr to *OUT_MERGED_SVALUE.
7772    If VAL_A or VAL_B is NULL_TREE, don't populate EXPR
7773 for that region_model. */
7774
7775 static void
7776 assert_region_models_merge (tree expr, tree val_a, tree val_b,
7777 region_model *out_merged_model,
7778 svalue **out_merged_svalue)
7779 {
7780 test_region_model_context ctxt;
7781 region_model model0;
7782 region_model model1;
7783 if (val_a)
7784 model0.set_value (model0.get_lvalue (expr, &ctxt),
7785 model0.get_rvalue (val_a, &ctxt),
7786 &ctxt);
7787 if (val_b)
7788 model1.set_value (model1.get_lvalue (expr, &ctxt),
7789 model1.get_rvalue (val_b, &ctxt),
7790 &ctxt);
7791
7792 /* They should be mergeable. */
7793 ASSERT_TRUE (model0.can_merge_with_p (model1, out_merged_model));
7794
7795 svalue_id merged_svalue_sid = out_merged_model->get_rvalue (expr, &ctxt);
7796 *out_merged_svalue = out_merged_model->get_svalue (merged_svalue_sid);
7797 }
7798
7799 /* Verify that we can merge region_model instances. */
7800
7801 static void
7802 test_state_merging ()
7803 {
7804 tree int_42 = build_int_cst (integer_type_node, 42);
7805 tree int_113 = build_int_cst (integer_type_node, 113);
7806 tree x = build_global_decl ("x", integer_type_node);
7807 tree y = build_global_decl ("y", integer_type_node);
7808 tree z = build_global_decl ("z", integer_type_node);
7809 tree p = build_global_decl ("p", ptr_type_node);
7810
7811 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7812 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7813
7814 auto_vec <tree> param_types;
7815 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7816 allocate_struct_function (test_fndecl, true);
7817
7818 /* Param "a". */
7819 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7820 get_identifier ("a"),
7821 integer_type_node);
7822 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7823
7824 /* Param "q", a pointer. */
7825 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7826 get_identifier ("q"),
7827 ptr_type_node);
7828
7829 {
7830 region_model model0;
7831 region_model model1;
7832 region_model merged;
7833 /* Verify empty models can be merged. */
7834 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7835 ASSERT_EQ (model0, merged);
7836 }
7837
7838 /* Verify that we can merge two models with contradictory constraints
7839    on the value of a global. */
7840 /* TODO: verify that the merged model doesn't have a value for
7841    the global.  */
7842 {
7843 region_model model0;
7844 region_model model1;
7845 region_model merged;
7846 test_region_model_context ctxt;
7847 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7848 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
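      /* Merging should succeed; presumably the conflicting constraints are
         dropped, so the merged model ought to differ from both inputs.  */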
7849 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7850 ASSERT_NE (model0, merged);
7851 ASSERT_NE (model1, merged);
7852 }
7853
7854 /* Verify handling of a PARM_DECL. */
7855 {
7856 test_region_model_context ctxt;
7857 region_model model0;
7858 region_model model1;
7859 ASSERT_EQ (model0.get_stack_depth (), 0);
7860 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7861 ASSERT_EQ (model0.get_stack_depth (), 1);
7862 ASSERT_EQ (model0.get_function_at_depth (0),
7863 DECL_STRUCT_FUNCTION (test_fndecl));
7864 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7865
7866 svalue_id sid_a
7867 = model0.set_to_new_unknown_value (model0.get_lvalue (a, &ctxt),
7868 integer_type_node, &ctxt);
7869 model1.set_to_new_unknown_value (model1.get_lvalue (a, &ctxt),
7870 integer_type_node, &ctxt);
7871 ASSERT_EQ (model0, model1);
7872
7873 /* Check that get_value_by_name works for locals. */
7874 ASSERT_EQ (model0.get_value_by_name ("a"), sid_a);
7875
7876 /* They should be mergeable, and the result should be the same. */
7877 region_model merged;
7878 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7879 ASSERT_EQ (model0, merged);
7880 /* In particular, there should be an unknown value for "a". */
7881 svalue *merged_a_sval = merged.get_svalue (merged.get_rvalue (a, &ctxt));
7882 ASSERT_EQ (merged_a_sval->get_kind (), SK_UNKNOWN);
7883 }
7884
7885 /* Verify handling of a global. */
7886 {
7887 test_region_model_context ctxt;
7888 region_model model0;
7889 region_model model1;
7890 svalue_id sid_x
7891 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
7892 integer_type_node, &ctxt);
7893 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt),
7894 integer_type_node, &ctxt);
7895 ASSERT_EQ (model0, model1);
7896
7897 /* Check that get_value_by_name works for globals. */
7898 ASSERT_EQ (model0.get_value_by_name ("x"), sid_x);
7899
7900 /* They should be mergeable, and the result should be the same. */
7901 region_model merged;
7902 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7903 ASSERT_EQ (model0, merged);
7904 /* In particular, there should be an unknown value for "x". */
7905 svalue *merged_x_sval = merged.get_svalue (merged.get_rvalue (x, &ctxt));
7906 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7907 }
7908
7909 /* Use global-handling to verify various combinations of values. */
7910
7911 /* Two equal constant values. */
7912 {
7913 region_model merged;
7914 svalue *merged_x_sval;
7915 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7916
7917 /* In particular, there should be a constant value for "x". */
7918 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7919 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7920 int_42);
7921 }
7922
7923 /* Two non-equal constant values. */
7924 {
7925 region_model merged;
7926 svalue *merged_x_sval;
7927 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7928
7929 /* In particular, there should be an unknown value for "x". */
7930 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7931 }
7932
7933 /* Uninit and constant. */
7934 {
7935 region_model merged;
7936 svalue *merged_x_sval;
7937 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7938
7939 /* In particular, there should be an unknown value for "x". */
7940 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7941 }
7942
7943 /* Constant and uninit. */
7944 {
7945 region_model merged;
7946 svalue *merged_x_sval;
7947 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7948
7949 /* In particular, there should be an unknown value for "x". */
7950 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7951 }
7952
7953 /* Unknown and constant. */
7954 // TODO
7955
7956 /* Pointers: NULL and NULL. */
7957 // TODO
7958
7959 /* Pointers: NULL and non-NULL. */
7960 // TODO
7961
7962 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7963 {
7964 region_model model0;
7965 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7966 model0.set_to_new_unknown_value (model0.get_lvalue (a, NULL),
7967 integer_type_node, NULL);
7968 model0.set_value (model0.get_lvalue (p, NULL),
7969 model0.get_rvalue (addr_of_a, NULL), NULL);
7970
7971 region_model model1 (model0);
7972 ASSERT_EQ (model0, model1);
7973
7974 /* They should be mergeable, and the result should be the same. */
7975 region_model merged;
7976 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
7977 ASSERT_EQ (model0, merged);
7978 }
7979
7980 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7981 {
7982 region_model merged;
7983 /* p == &y in both input models. */
7984 svalue *merged_p_sval;
7985 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7986 &merged_p_sval);
7987
7988 /* We should get p == &y in the merged model. */
7989 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
7990 region_svalue *merged_p_ptr = merged_p_sval->dyn_cast_region_svalue ();
7991 region_id merged_p_star_rid = merged_p_ptr->get_pointee ();
7992 ASSERT_EQ (merged_p_star_rid, merged.get_lvalue (y, NULL));
7993 }
7994
7995 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7996 {
7997 region_model merged;
7998 /* x == &y vs x == &z in the input models. */
7999 svalue *merged_x_sval;
8000 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
8001 &merged_x_sval);
8002
8003 /* We should get x == unknown in the merged model. */
8004 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
8005 }
8006
8007 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
8008 {
8009 test_region_model_context ctxt;
8010 region_model model0;
8011 region_id new_rid = model0.add_new_malloc_region ();
8012 svalue_id ptr_sid
8013 = model0.get_or_create_ptr_svalue (ptr_type_node, new_rid);
8014 model0.set_value (model0.get_lvalue (p, &ctxt),
8015 ptr_sid, &ctxt);
8016 model0.canonicalize (&ctxt);
8017
8018 region_model model1 (model0);
8019
8020 ASSERT_EQ (model0, model1);
8021
8022 region_model merged;
8023 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8024
8025 merged.canonicalize (&ctxt);
8026
8027 /* The merged model ought to be identical (after canonicalization,
8028 at least). */
8029 ASSERT_EQ (model0, merged);
8030 }
8031
8032 /* Two regions sharing the same unknown svalue should continue sharing
8033 an unknown svalue after self-merger. */
8034 {
8035 test_region_model_context ctxt;
8036 region_model model0;
8037 svalue_id sid
8038 = model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
8039 integer_type_node, &ctxt);
8040 model0.set_value (model0.get_lvalue (y, &ctxt), sid, &ctxt);
8041 region_model model1 (model0);
8042
8043 /* They should be mergeable, and the result should be the same. */
8044 region_model merged;
8045 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8046 ASSERT_EQ (model0, merged);
8047
8048 /* In particular, we should have x == y. */
8049 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
8050 tristate (tristate::TS_TRUE));
8051 }
8052
8053 #if 0
8054 {
8055 region_model model0;
8056 region_model model1;
8057 test_region_model_context ctxt;
8058 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
8059 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
8060 ASSERT_TRUE (model0.can_merge_with_p (model1));
8061 }
8062
8063 {
8064 region_model model0;
8065 region_model model1;
8066 test_region_model_context ctxt;
8067 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
8068 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
8069 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
8070 ASSERT_TRUE (model0.can_merge_with_p (model1));
8071 }
8072 #endif
8073
8074 // TODO: what can't we merge? need at least one such test
8075
8076 /* TODO: various things
8077    - heap regions
8078    - value merging:
8079      - every combination, but in particular
8080        - pairs of regions
8081 */
8082
8083 /* Views: store "&y" through a view of "x" as a pointer type, then
        verify that two identical models containing the view can be merged.  */
8084 {
8085 test_region_model_context ctxt;
8086 region_model model0;
8087
8088 region_id x_rid = model0.get_lvalue (x, &ctxt);
8089 region_id x_as_ptr = model0.get_or_create_view (x_rid, ptr_type_node);
8090 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
8091
8092 region_model model1 (model0);
8093 ASSERT_EQ (model1, model0);
8094
8095 /* They should be mergeable, and the result should be the same. */
8096 region_model merged;
8097 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8098 }
8099
8100 /* Verify that we can merge a model in which a local in an older stack
8101 frame points to a local in a more recent stack frame. */
8102 {
8103 region_model model0;
8104 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8105 region_id q_in_first_frame = model0.get_lvalue (q, NULL);
8106
8107 /* Push a second frame. */
8108 region_id rid_2nd_frame
8109 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8110
8111 /* Have a pointer in the older frame point to a local in the
8112 more recent frame. */
8113 svalue_id sid_ptr = model0.get_rvalue (addr_of_a, NULL);
8114 model0.set_value (q_in_first_frame, sid_ptr, NULL);
8115
8116 /* Verify that it's pointing at the newer frame. */
8117 region_id rid_pointee
8118 = model0.get_svalue (sid_ptr)->dyn_cast_region_svalue ()->get_pointee ();
8119 ASSERT_EQ (model0.get_region (rid_pointee)->get_parent (), rid_2nd_frame);
8120
8121 model0.canonicalize (NULL);
8122
8123 region_model model1 (model0);
8124 ASSERT_EQ (model0, model1);
8125
8126 /* They should be mergeable, and the result should be the same
8127 (after canonicalization, at least). */
8128 region_model merged;
8129 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8130 merged.canonicalize (NULL);
8131 ASSERT_EQ (model0, merged);
8132 }
8133
8134 /* Verify that we can merge a model in which a local points to a global. */
8135 {
8136 region_model model0;
8137 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
8138 model0.set_value (model0.get_lvalue (q, NULL),
8139 model0.get_rvalue (addr_of_y, NULL), NULL);
8140
8141 model0.canonicalize (NULL);
8142
8143 region_model model1 (model0);
8144 ASSERT_EQ (model0, model1);
8145
8146 /* They should be mergeable, and the result should be the same
8147 (after canonicalization, at least). */
8148 region_model merged;
8149 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8150 merged.canonicalize (NULL);
8151 ASSERT_EQ (model0, merged);
8152 }
8153 }
8154
8155 /* Verify that constraints are correctly merged when merging region_model
8156 instances. */
8157
8158 static void
8159 test_constraint_merging ()
8160 {
8161 tree int_0 = build_int_cst (integer_type_node, 0);
8162 tree int_5 = build_int_cst (integer_type_node, 5);
8163 tree x = build_global_decl ("x", integer_type_node);
8164 tree y = build_global_decl ("y", integer_type_node);
8165 tree z = build_global_decl ("z", integer_type_node);
8166 tree n = build_global_decl ("n", integer_type_node);
8167
8168 test_region_model_context ctxt;
8169
8170 /* model0: 0 <= (x == y) < n. */
8171 region_model model0;
8172 model0.set_to_new_unknown_value (model0.get_lvalue (x, &ctxt),
8173 integer_type_node, &ctxt);
8174 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
8175 model0.add_constraint (x, GE_EXPR, int_0, NULL);
8176 model0.add_constraint (x, LT_EXPR, n, NULL);
8177
8178 /* model1: z != 5 && (0 <= x < n). */
8179 region_model model1;
8180 model1.set_to_new_unknown_value (model1.get_lvalue (x, &ctxt),
8181 integer_type_node, &ctxt);
8182 model1.add_constraint (z, NE_EXPR, int_5, NULL);
8183 model1.add_constraint (x, GE_EXPR, int_0, NULL);
8184 model1.add_constraint (x, LT_EXPR, n, NULL);
8185
8186 /* They should be mergeable; the merged constraints should
8187 be: (0 <= x < n). */
8188 region_model merged;
8189 ASSERT_TRUE (model0.can_merge_with_p (model1, &merged));
8190
8191 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
8192 tristate (tristate::TS_TRUE));
8193 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
8194 tristate (tristate::TS_TRUE));
8195
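      /* Constraints that were only present in one of the input models
         should not be known in the merged model.  */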
8196 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
8197 tristate (tristate::TS_UNKNOWN));
8198 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
8199 tristate (tristate::TS_UNKNOWN));
8200 }
8201
8202 /* Run all of the selftests within this file. */
8203
8204 void
8205 analyzer_region_model_cc_tests ()
8206 {
8207 test_tree_cmp_on_constants ();
8208 test_dump ();
8209 test_unique_constants ();
8210 test_svalue_equality ();
8211 test_region_equality ();
8212 test_purging_by_criteria ();
8213 test_purge_unused_svalues ();
8214 test_assignment ();
8215 test_stack_frames ();
8216 test_get_representative_path_var ();
8217 test_canonicalization_1 ();
8218 test_canonicalization_2 ();
8219 test_canonicalization_3 ();
8220 test_canonicalization_4 ();
8221 test_state_merging ();
8222 test_constraint_merging ();
8223 }
8224
8225 } // namespace selftest
8226
8227 #endif /* CHECKING_P */
8228
8229 } // namespace ana
8230
8231 #endif /* #if ENABLE_ANALYZER */