]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
analyzer: add regression test [PR103685]
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df 1/* Classes for modeling the state of memory.
7adcbafe 2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
757bf1df
DM
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
7892ff37 29#include "diagnostic-core.h"
757bf1df
DM
30#include "graphviz.h"
31#include "options.h"
32#include "cgraph.h"
33#include "tree-dfa.h"
34#include "stringpool.h"
35#include "convert.h"
36#include "target.h"
37#include "fold-const.h"
38#include "tree-pretty-print.h"
39#include "diagnostic-color.h"
40#include "diagnostic-metadata.h"
757bf1df 41#include "tristate.h"
ef7827b0 42#include "bitmap.h"
757bf1df
DM
43#include "selftest.h"
44#include "function.h"
809192e7 45#include "json.h"
757bf1df
DM
46#include "analyzer/analyzer.h"
47#include "analyzer/analyzer-logging.h"
48#include "ordered-hash-map.h"
49#include "options.h"
50#include "cgraph.h"
51#include "cfg.h"
52#include "digraph.h"
53#include "analyzer/supergraph.h"
54#include "sbitmap.h"
808f4dfe
DM
55#include "analyzer/call-string.h"
56#include "analyzer/program-point.h"
57#include "analyzer/store.h"
757bf1df
DM
58#include "analyzer/region-model.h"
59#include "analyzer/constraint-manager.h"
60#include "diagnostic-event-id.h"
61#include "analyzer/sm.h"
62#include "diagnostic-event-id.h"
63#include "analyzer/sm.h"
64#include "analyzer/pending-diagnostic.h"
808f4dfe 65#include "analyzer/region-model-reachability.h"
757bf1df 66#include "analyzer/analyzer-selftests.h"
f573d351 67#include "analyzer/program-state.h"
884d9141 68#include "stor-layout.h"
c7e276b8 69#include "attribs.h"
9a2c9579 70#include "tree-object-size.h"
757bf1df
DM
71
72#if ENABLE_ANALYZER
73
75038aa6
DM
74namespace ana {
75
757bf1df
DM
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  /* TDF_SLIM keeps the dump terse (no full type/decl details).  */
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
84
808f4dfe
DM
85/* Dump T to PP in language-independent form in quotes, for
86 debugging/logging/dumping purposes. */
757bf1df
DM
87
88void
808f4dfe 89dump_quoted_tree (pretty_printer *pp, tree t)
757bf1df 90{
808f4dfe
DM
91 pp_begin_quote (pp, pp_show_color (pp));
92 dump_tree (pp, t);
93 pp_end_quote (pp, pp_show_color (pp));
757bf1df
DM
94}
95
808f4dfe
DM
/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  /* Call dump_generic_node directly (rather than via pp_printf) to
     avoid the reentrancy problem described above.  */
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
114
d726a57b
DM
115/* class region_to_value_map. */
116
117/* Assignment operator for region_to_value_map. */
118
119region_to_value_map &
120region_to_value_map::operator= (const region_to_value_map &other)
121{
122 m_hash_map.empty ();
123 for (auto iter : other.m_hash_map)
124 {
125 const region *reg = iter.first;
126 const svalue *sval = iter.second;
127 m_hash_map.put (reg, sval);
128 }
129 return *this;
130}
131
132/* Equality operator for region_to_value_map. */
133
134bool
135region_to_value_map::operator== (const region_to_value_map &other) const
136{
137 if (m_hash_map.elements () != other.m_hash_map.elements ())
138 return false;
139
140 for (auto iter : *this)
141 {
142 const region *reg = iter.first;
143 const svalue *sval = iter.second;
144 const svalue * const *other_slot = other.get (reg);
145 if (other_slot == NULL)
146 return false;
147 if (sval != *other_slot)
148 return false;
149 }
150
151 return true;
152}
153
/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  /* Sort the regions so that the dump output is deterministic.  */
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
186
/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Match the colorization of the global diagnostic printer.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
200
201
202/* Attempt to merge THIS with OTHER, writing the result
203 to OUT.
204
205 For now, write (region, value) mappings that are in common between THIS
206 and OTHER to OUT, effectively taking the intersection, rather than
207 rejecting differences. */
208
209bool
210region_to_value_map::can_merge_with_p (const region_to_value_map &other,
211 region_to_value_map *out) const
212{
213 for (auto iter : *this)
214 {
215 const region *iter_reg = iter.first;
216 const svalue *iter_sval = iter.second;
217 const svalue * const * other_slot = other.get (iter_reg);
218 if (other_slot)
219 if (iter_sval == *other_slot)
220 out->put (iter_reg, iter_sval);
221 }
222 return true;
223}
224
33255ad3
DM
225/* Purge any state involving SVAL. */
226
227void
228region_to_value_map::purge_state_involving (const svalue *sval)
229{
230 auto_vec<const region *> to_purge;
231 for (auto iter : *this)
232 {
233 const region *iter_reg = iter.first;
234 const svalue *iter_sval = iter.second;
235 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
236 to_purge.safe_push (iter_reg);
237 }
238 for (auto iter : to_purge)
239 m_hash_map.remove (iter);
240}
241
757bf1df
DM
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  /* The constraint manager is heap-allocated and owned by this model
     (released in the dtor).  */
  m_constraints = new constraint_manager (mgr);
}
252
/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  /* Deep-copy the constraint manager; each model owns its own.  */
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
262
/* region_model's dtor.  Release the owned constraint manager.  */

region_model::~region_model ()
{
  delete m_constraints;
}
269
/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  /* Replace the owned constraint manager with a deep copy of OTHER's.  */
  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}
289
/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  /* Sanity-check: equal models must hash equally (hash () only uses
     the store and constraints, both compared above).  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
318
319/* Generate a hash value for this region_model. */
320
321hashval_t
808f4dfe
DM
322region_model::hash () const
323{
324 hashval_t result = m_store.hash ();
325 result ^= m_constraints->hash ();
326 return result;
757bf1df
DM
327}
328
808f4dfe
DM
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.
   If MULTILINE, emit one item per line; otherwise emit a compact
   brace-delimited single-line form.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  /* Walk from the innermost frame outwards.  */
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
757bf1df 383
808f4dfe 384/* Dump a representation of this model to FILE. */
757bf1df 385
808f4dfe
DM
386void
387region_model::dump (FILE *fp, bool simple, bool multiline) const
388{
389 pretty_printer pp;
390 pp_format_decoder (&pp) = default_tree_printer;
391 pp_show_color (&pp) = pp_show_color (global_dc->printer);
392 pp.buffer->stream = fp;
393 dump_to_pp (&pp, simple, multiline);
394 pp_newline (&pp);
395 pp_flush (&pp);
757bf1df
DM
396}
397
808f4dfe 398/* Dump a multiline representation of this model to stderr. */
757bf1df 399
808f4dfe
DM
400DEBUG_FUNCTION void
401region_model::dump (bool simple) const
402{
403 dump (stderr, simple, true);
404}
757bf1df 405
808f4dfe 406/* Dump a multiline representation of this model to stderr. */
757bf1df 407
808f4dfe
DM
408DEBUG_FUNCTION void
409region_model::debug () const
757bf1df 410{
808f4dfe 411 dump (true);
757bf1df
DM
412}
413
e61ffa20
DM
/* Assert that this object is valid.
   Currently this just validates the store.  */

void
region_model::validate () const
{
  m_store.validate ();
}
421
808f4dfe
DM
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
431
432/* Return true if this region_model is in canonical form. */
433
434bool
435region_model::canonicalized_p () const
436{
437 region_model copy (*this);
808f4dfe 438 copy.canonicalize ();
757bf1df
DM
439 return *this == copy;
440}
441
808f4dfe
DM
/* See the comment for store::loop_replay_fixup.
   Delegates to the store, using DST_STATE's store.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
449
757bf1df
DM
450/* A subclass of pending_diagnostic for complaining about uses of
451 poisoned values. */
452
453class poisoned_value_diagnostic
454: public pending_diagnostic_subclass<poisoned_value_diagnostic>
455{
456public:
457 poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
458 : m_expr (expr), m_pkind (pkind)
459 {}
460
461 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }
462
33255ad3
DM
463 bool use_of_uninit_p () const FINAL OVERRIDE
464 {
465 return m_pkind == POISON_KIND_UNINIT;
466 }
467
757bf1df
DM
468 bool operator== (const poisoned_value_diagnostic &other) const
469 {
470 return m_expr == other.m_expr;
471 }
472
473 bool emit (rich_location *rich_loc) FINAL OVERRIDE
474 {
475 switch (m_pkind)
476 {
477 default:
478 gcc_unreachable ();
33255ad3
DM
479 case POISON_KIND_UNINIT:
480 {
481 diagnostic_metadata m;
482 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
483 return warning_meta (rich_loc, m,
484 OPT_Wanalyzer_use_of_uninitialized_value,
485 "use of uninitialized value %qE",
486 m_expr);
487 }
488 break;
757bf1df
DM
489 case POISON_KIND_FREED:
490 {
491 diagnostic_metadata m;
492 m.add_cwe (416); /* "CWE-416: Use After Free". */
6c8e5844
DM
493 return warning_meta (rich_loc, m,
494 OPT_Wanalyzer_use_after_free,
495 "use after %<free%> of %qE",
496 m_expr);
757bf1df
DM
497 }
498 break;
499 case POISON_KIND_POPPED_STACK:
500 {
757bf1df 501 /* TODO: which CWE? */
808f4dfe
DM
502 return warning_at
503 (rich_loc,
504 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
505 "dereferencing pointer %qE to within stale stack frame",
506 m_expr);
757bf1df
DM
507 }
508 break;
509 }
510 }
511
512 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
513 {
514 switch (m_pkind)
515 {
516 default:
517 gcc_unreachable ();
33255ad3
DM
518 case POISON_KIND_UNINIT:
519 return ev.formatted_print ("use of uninitialized value %qE here",
520 m_expr);
757bf1df
DM
521 case POISON_KIND_FREED:
522 return ev.formatted_print ("use after %<free%> of %qE here",
523 m_expr);
524 case POISON_KIND_POPPED_STACK:
525 return ev.formatted_print
808f4dfe 526 ("dereferencing pointer %qE to within stale stack frame",
757bf1df
DM
527 m_expr);
528 }
529 }
530
531private:
532 tree m_expr;
533 enum poison_kind m_pkind;
534};
535
5e00ad3f
DM
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, OPT_Wanalyzer_shift_count_negative,
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
  }

private:
  const gassign *m_assign;  /* The shift statement being complained about.  */
  tree m_count_cst;         /* The negative INTEGER_CST shift count.  */
};
573
/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, OPT_Wanalyzer_shift_count_overflow,
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;   /* The shift statement being complained about.  */
  int m_operand_precision;   /* Bit width of the shifted operand's type.  */
  tree m_count_cst;          /* The out-of-range INTEGER_CST shift count.  */
};
617
808f4dfe
DM
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.

   As a side-effect, for shift operations with a constant out-of-range
   count, a diagnostic may be queued on CTXT.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Not a tree code we can model symbolically; the caller
	 (on_assignment) handles the remaining cases.  */
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn (new shift_count_negative_diagnostic
				  (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn (new shift_count_overflow_diagnostic
				  (assign,
				   TYPE_PRECISION (TREE_TYPE (rhs1)),
				   rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
820
33255ad3
DM
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   If CTXT is NULL, no check is performed and SVAL is returned as-is.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();
      /* ctxt->warn returns true iff the diagnostic was actually added.  */
      if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
855
808f4dfe
DM
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      /* Called for the diagnostic side-effect only; the (possibly
	 replaced) return value is deliberately not used here.  */
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Flip "if (0)" to "if (1)" to make unhandled tree codes visible
	   during development.  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		/* Elements without an explicit index use their position.  */
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
938
33255ad3
DM
/* A pending_diagnostic subclass for implementing "__analyzer_dump_path".  */

class dump_path_diagnostic
  : public pending_diagnostic_subclass<dump_path_diagnostic>
{
public:
  /* Emit a note (not a warning) showing the path to this point.  */
  bool emit (rich_location *richloc) FINAL OVERRIDE
  {
    inform (richloc, "path");
    return true;
  }

  const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; }

  /* All instances are interchangeable (no per-instance state).  */
  bool operator== (const dump_path_diagnostic &) const
  {
    return true;
  }
};
958
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_TERMINATE_PATH if the path should be terminated.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_terminate_path,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);

	/* Debugging/test support: intercept the "__analyzer_*" builtins
	   before treating the stmt as an ordinary call.  */
	if (is_special_named_call_p (call, "__analyzer_describe", 2))
	  impl_call_analyzer_describe (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
	  impl_call_analyzer_dump_capacity (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
	  impl_call_analyzer_dump_escaped (call);
	else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
	  {
	    /* Handle the builtin "__analyzer_dump_path" by queuing a
	       diagnostic at this exploded_node.  */
	    ctxt->warn (new dump_path_diagnostic ());
	  }
	else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
					  0))
	  {
	    /* Handle the builtin "__analyzer_dump_region_model" by dumping
	       the region model's state to stderr.  */
	    dump (false);
	  }
	else if (is_special_named_call_p (call, "__analyzer_eval", 1))
	  impl_call_analyzer_eval (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_break", 0))
	  {
	    /* Handle the builtin "__analyzer_break" by triggering a
	       breakpoint.  */
	    /* TODO: is there a good cross-platform way to do this?  */
	    raise (SIGINT);
	  }
	else if (is_special_named_call_p (call,
					  "__analyzer_dump_exploded_nodes",
					  1))
	  {
	    /* This is handled elsewhere.  */
	  }
	else
	  *out_unknown_side_effects = on_call_pre (call, ctxt,
						   out_terminate_path);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}
1046
757bf1df
DM
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).

   Write true to *OUT_TERMINATE_PATH if this execution path should be
   terminated (e.g. the function call terminates the process).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
			   bool *out_terminate_path)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval
	= m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
						lhs_region);
      /* Any stale state mentioning the conjured value must be dropped
	 before rebinding it, to avoid it referring to the previous
	 value of the lhs.  */
      purge_state_involving (sval, ctxt);
      set_value (lhs_region, sval, ctxt);
    }

  /* Internal fns (IFN_*) have no fndecl; handle the ones we model.  */
  if (gimple_call_internal_p (call))
    {
      switch (gimple_call_internal_fn (call))
	{
	default:
	  break;
	case IFN_BUILTIN_EXPECT:
	  impl_call_builtin_expect (cd);
	  return false;
	case IFN_UBSAN_BOUNDS:
	  /* No side effects to model.  */
	  return false;
	}
    }

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      /* The various impl_call_* member functions are implemented
	 in region-model-impl-calls.cc.
	 Having them split out into separate functions makes it easier
	 to put breakpoints on the handling of specific functions.  */

      if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	  && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
	  {
	  default:
	    if (!DECL_PURE_P (callee_fndecl))
	      unknown_side_effects = true;
	    break;
	  case BUILT_IN_ALLOCA:
	  case BUILT_IN_ALLOCA_WITH_ALIGN:
	    impl_call_alloca (cd);
	    return false;
	  case BUILT_IN_CALLOC:
	    impl_call_calloc (cd);
	    return false;
	  case BUILT_IN_EXPECT:
	  case BUILT_IN_EXPECT_WITH_PROBABILITY:
	    impl_call_builtin_expect (cd);
	    return false;
	  case BUILT_IN_FREE:
	    /* Handle in "on_call_post".  */
	    break;
	  case BUILT_IN_MALLOC:
	    impl_call_malloc (cd);
	    return false;
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMCPY_CHK:
	    impl_call_memcpy (cd);
	    return false;
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMSET_CHK:
	    impl_call_memset (cd);
	    return false;
	    break;
	  case BUILT_IN_REALLOC:
	    /* Handled in "on_call_post" (see the switch there).  */
	    return false;
	  case BUILT_IN_STRCHR:
	    impl_call_strchr (cd);
	    return false;
	  case BUILT_IN_STRCPY:
	  case BUILT_IN_STRCPY_CHK:
	    impl_call_strcpy (cd);
	    return false;
	  case BUILT_IN_STRLEN:
	    impl_call_strlen (cd);
	    return false;

	  case BUILT_IN_STACK_SAVE:
	  case BUILT_IN_STACK_RESTORE:
	    /* No side effects to model.  */
	    return false;

	  /* Stdio builtins.  */
	  case BUILT_IN_FPRINTF:
	  case BUILT_IN_FPRINTF_UNLOCKED:
	  case BUILT_IN_PUTC:
	  case BUILT_IN_PUTC_UNLOCKED:
	  case BUILT_IN_FPUTC:
	  case BUILT_IN_FPUTC_UNLOCKED:
	  case BUILT_IN_FPUTS:
	  case BUILT_IN_FPUTS_UNLOCKED:
	  case BUILT_IN_FWRITE:
	  case BUILT_IN_FWRITE_UNLOCKED:
	  case BUILT_IN_PRINTF:
	  case BUILT_IN_PRINTF_UNLOCKED:
	  case BUILT_IN_PUTCHAR:
	  case BUILT_IN_PUTCHAR_UNLOCKED:
	  case BUILT_IN_PUTS:
	  case BUILT_IN_PUTS_UNLOCKED:
	  case BUILT_IN_VFPRINTF:
	  case BUILT_IN_VPRINTF:
	    /* These stdio builtins have external effects that are out
	       of scope for the analyzer: we only want to model the effects
	       on the return value.  */
	    break;
	  }
      else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
	{
	  impl_call_malloc (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
	{
	  impl_call_calloc (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
	{
	  impl_call_alloca (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
	{
	  impl_call_realloc (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "error"))
	{
	  /* error () takes 3 leading args before the format string.  */
	  if (impl_call_error (cd, 3, out_terminate_path))
	    return false;
	  else
	    unknown_side_effects = true;
	}
      else if (is_named_call_p (callee_fndecl, "error_at_line"))
	{
	  /* error_at_line () takes 5 leading args before the format
	     string.  */
	  if (impl_call_error (cd, 5, out_terminate_path))
	    return false;
	  else
	    unknown_side_effects = true;
	}
      else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
	       || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
	{
	  impl_call_fgets (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "fread", call, 4))
	{
	  impl_call_fread (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
	{
	  /* No side-effects (tracking stream state is out-of-scope
	     for the analyzer).  */
	}
      else if (is_named_call_p (callee_fndecl, "memset", call, 3)
	       && POINTER_TYPE_P (cd.get_arg_type (0)))
	{
	  impl_call_memset (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
	       && POINTER_TYPE_P (cd.get_arg_type (0)))
	{
	  impl_call_strchr (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
	       && POINTER_TYPE_P (cd.get_arg_type (0)))
	{
	  impl_call_strlen (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
	{
	  impl_call_operator_new (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
	{
	  impl_call_operator_new (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
	       || is_named_call_p (callee_fndecl, "operator delete", call, 2)
	       || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
	{
	  /* Handle in "on_call_post".  */
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && !DECL_PURE_P (callee_fndecl)
	       && !fndecl_built_in_p (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  return unknown_side_effects;
}
1272
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (is_named_call_p (callee_fndecl, "free", call, 1))
	{
	  impl_call_free (cd);
	  return;
	}
      if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
	  || is_named_call_p (callee_fndecl, "operator delete", call, 2)
	  || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
	{
	  impl_call_operator_delete (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
      /* Builtins whose modeling is deferred from on_call_pre to here
	 (after sm-state updates).  */
      if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	  && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
	  {
	  default:
	    break;
	  case BUILT_IN_REALLOC:
	    impl_call_realloc (cd);
	    return;
	  }
    }

  /* For a call with unknown side effects, conservatively clobber
     anything it could have touched.  */
  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
1325
33255ad3
DM
1326/* Purge state involving SVAL from this region_model, using CTXT
1327 (if non-NULL) to purge other state in a program_state.
1328
1329 For example, if we're at the def-stmt of an SSA name, then we need to
1330 purge any state for svalues that involve that SSA name. This avoids
1331 false positives in loops, since a symbolic value referring to the
1332 SSA name will be referring to the previous value of that SSA name.
1333
1334 For example, in:
1335 while ((e = hashmap_iter_next(&iter))) {
1336 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1337 free (e_strbuf->value);
1338 }
1339 at the def-stmt of e_8:
1340 e_8 = hashmap_iter_next (&iter);
1341 we should purge the "freed" state of:
1342 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1343 which is the "e_strbuf->value" value from the previous iteration,
1344 or we will erroneously report a double-free - the "e_8" within it
1345 refers to the previous value. */
1346
1347void
1348region_model::purge_state_involving (const svalue *sval,
1349 region_model_context *ctxt)
1350{
a113b143
DM
1351 if (!sval->can_have_associated_state_p ())
1352 return;
33255ad3
DM
1353 m_store.purge_state_involving (sval, m_mgr);
1354 m_constraints->purge_state_involving (sval);
1355 m_dynamic_extents.purge_state_involving (sval);
1356 if (ctxt)
1357 ctxt->purge_state_involving (sval);
1358}
1359
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.
	   The param-type list may be shorter than the arg list
	   (e.g. for variadic fns), hence the NULL check.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
ef7827b0 1446
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values
     (i.e. base regions whose parent is a frame region).  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
1496
1497/* Update this model for the RETURN_STMT, using CTXT to report any
1498 diagnostics. */
1499
1500void
1501region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1502{
1503 tree callee = get_current_function ()->decl;
1504 tree lhs = DECL_RESULT (callee);
1505 tree rhs = gimple_return_retval (return_stmt);
1506
1507 if (lhs && rhs)
a96f1c38 1508 copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt);
757bf1df
DM
1509}
1510
342e14ff
DM
1511/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1512 ENODE, using CTXT to report any diagnostics.
757bf1df 1513
342e14ff
DM
1514 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1515 0), as opposed to any second return due to longjmp/sigsetjmp. */
757bf1df
DM
1516
1517void
1518region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1519 region_model_context *ctxt)
1520{
808f4dfe
DM
1521 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1522 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1523 ctxt);
757bf1df 1524
808f4dfe
DM
1525 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1526 region. */
1527 if (buf_reg)
757bf1df 1528 {
fd9982bb 1529 setjmp_record r (enode, call);
808f4dfe
DM
1530 const svalue *sval
1531 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1532 set_value (buf_reg, sval, ctxt);
757bf1df
DM
1533 }
1534
1535 /* Direct calls to setjmp return 0. */
1536 if (tree lhs = gimple_call_lhs (call))
1537 {
1aff29d4
DM
1538 const svalue *new_sval
1539 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
808f4dfe
DM
1540 const region *lhs_reg = get_lvalue (lhs, ctxt);
1541 set_value (lhs_reg, new_sval, ctxt);
757bf1df
DM
1542 }
1543}
1544
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp"
     (i.e. before any frames are popped).  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
1597
1598/* Update this region_model for a phi stmt of the form
1599 LHS = PHI <...RHS...>.
e0a7a675
DM
1600 where RHS is for the appropriate edge.
1601 Get state from OLD_STATE so that all of the phi stmts for a basic block
1602 are effectively handled simultaneously. */
757bf1df
DM
1603
1604void
8525d1f5 1605region_model::handle_phi (const gphi *phi,
808f4dfe 1606 tree lhs, tree rhs,
e0a7a675 1607 const region_model &old_state,
757bf1df
DM
1608 region_model_context *ctxt)
1609{
1610 /* For now, don't bother tracking the .MEM SSA names. */
1611 if (tree var = SSA_NAME_VAR (lhs))
1612 if (TREE_CODE (var) == VAR_DECL)
1613 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
1614 return;
1615
e0a7a675
DM
1616 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1617 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
757bf1df 1618
e0a7a675 1619 set_value (dst_reg, src_sval, ctxt);
8525d1f5
DM
1620
1621 if (ctxt)
1622 ctxt->on_phi (phi, rhs);
757bf1df
DM
1623}
1624
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unrecognized tree code: give up gracefully with a placeholder
	 region.  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case MEM_REF:
      {
	/* *(PTR + OFFSET), where OFFSET is in bytes.  */
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	/* Locals are looked up within the frame given by the
	   path_var's stack depth.  */
	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
1711
1712/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
1713
09bea584
DM
1714static void
1715assert_compat_types (tree src_type, tree dst_type)
1716{
1717 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
1718 {
1719#if CHECKING_P
1720 if (!(useless_type_conversion_p (src_type, dst_type)))
1721 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
1722#endif
1723 }
09bea584 1724}
757bf1df 1725
ea4e3218
DM
1726/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
1727
e66b9f67 1728bool
ea4e3218
DM
1729compat_types_p (tree src_type, tree dst_type)
1730{
1731 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
1732 if (!(useless_type_conversion_p (src_type, dst_type)))
1733 return false;
1734 return true;
1735}
1736
808f4dfe 1737/* Get the region for PV within this region_model,
757bf1df
DM
1738 emitting any diagnostics to CTXT. */
1739
808f4dfe 1740const region *
53cb324c 1741region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1742{
1743 if (pv.m_tree == NULL_TREE)
808f4dfe 1744 return NULL;
757bf1df 1745
808f4dfe
DM
1746 const region *result_reg = get_lvalue_1 (pv, ctxt);
1747 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
1748 return result_reg;
757bf1df
DM
1749}
1750
808f4dfe 1751/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
1752 recent stack frame if it's a local). */
1753
808f4dfe 1754const region *
53cb324c 1755region_model::get_lvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
1756{
1757 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1758}
1759
/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      /* Unhandled tree code: fall back to an unknown value.  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR".  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* Extract the constant bit-range given by operands 1 and 2.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	/* Lvalue-like trees: read the current binding from the store.  */
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	/* Evaluate the underlying expression of the vtable reference.  */
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}
1870
1871/* Get the value of PV within this region_model,
1872 emitting any diagnostics to CTXT. */
1873
808f4dfe 1874const svalue *
53cb324c 1875region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1876{
1877 if (pv.m_tree == NULL_TREE)
808f4dfe 1878 return NULL;
757bf1df 1879
808f4dfe 1880 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 1881
808f4dfe
DM
1882 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
1883
33255ad3
DM
1884 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
1885
808f4dfe 1886 return result_sval;
757bf1df
DM
1887}
1888
1889/* Get the value of EXPR within this region_model (assuming the most
1890 recent stack frame if it's a local). */
1891
808f4dfe 1892const svalue *
53cb324c 1893region_model::get_rvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
1894{
1895 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1896}
1897
623bc027
DM
1898/* Return true if this model is on a path with "main" as the entrypoint
1899 (as opposed to one in which we're merely analyzing a subset of the
1900 path through the code). */
1901
1902bool
1903region_model::called_from_main_p () const
1904{
1905 if (!m_current_frame)
1906 return false;
1907 /* Determine if the oldest stack frame in this model is for "main". */
1908 const frame_region *frame0 = get_frame_at_index (0);
1909 gcc_assert (frame0);
1910 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
1911}
1912
/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  Read-only decls are handled the same way regardless
     of entrypoint.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    {
      /* Attempt to get the initializer value for base_reg.  */
      if (const svalue *base_reg_init
	    = base_reg->get_svalue_for_initializer (m_mgr))
	{
	  if (reg == base_reg)
	    return base_reg_init;
	  else
	    {
	      /* Get the value for REG within base_reg_init, by binding
		 the initializer into a scratch cluster and querying it
		 for REG.  */
	      binding_cluster c (base_reg);
	      c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
	      const svalue *sval
		= c.get_any_binding (m_mgr->get_store_manager (), reg);
	      if (sval)
		{
		  if (reg->get_type ())
		    sval = m_mgr->get_or_create_cast (reg->get_type (),
						      sval);
		  return sval;
		}
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
1969
808f4dfe 1970/* Get a value for REG, looking it up in the store, or otherwise falling
9faf8348
DM
1971 back to "initial" or "unknown" values.
1972 Use CTXT to report any warnings associated with reading from REG. */
757bf1df 1973
808f4dfe 1974const svalue *
9faf8348
DM
1975region_model::get_store_value (const region *reg,
1976 region_model_context *ctxt) const
757bf1df 1977{
9faf8348
DM
1978 check_region_for_read (reg, ctxt);
1979
2867118d
DM
1980 /* Special-case: handle var_decls in the constant pool. */
1981 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
1982 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
1983 return sval;
1984
808f4dfe
DM
1985 const svalue *sval
1986 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
1987 if (sval)
757bf1df 1988 {
808f4dfe
DM
1989 if (reg->get_type ())
1990 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
1991 return sval;
757bf1df 1992 }
757bf1df 1993
808f4dfe
DM
1994 /* Special-case: read at a constant index within a STRING_CST. */
1995 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
1996 if (tree byte_offset_cst
1997 = offset_reg->get_byte_offset ()->maybe_get_constant ())
1998 if (const string_region *str_reg
1999 = reg->get_parent_region ()->dyn_cast_string_region ())
757bf1df 2000 {
808f4dfe
DM
2001 tree string_cst = str_reg->get_string_cst ();
2002 if (const svalue *char_sval
2003 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2004 byte_offset_cst))
2005 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
757bf1df 2006 }
757bf1df 2007
808f4dfe
DM
2008 /* Special-case: read the initial char of a STRING_CST. */
2009 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2010 if (const string_region *str_reg
2011 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2012 {
2013 tree string_cst = str_reg->get_string_cst ();
2014 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2015 if (const svalue *char_sval
2016 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2017 byte_offset_cst))
2018 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2019 }
757bf1df 2020
808f4dfe
DM
2021 /* Otherwise we implicitly have the initial value of the region
2022 (if the cluster had been touched, binding_cluster::get_any_binding,
2023 would have returned UNKNOWN, and we would already have returned
2024 that above). */
757bf1df 2025
623bc027
DM
2026 /* Handle globals. */
2027 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2028 == RK_GLOBALS)
2029 return get_initial_value_for_global (reg);
757bf1df 2030
808f4dfe 2031 return m_mgr->get_or_create_initial_value (reg);
757bf1df
DM
2032}
2033
808f4dfe
DM
2034/* Return false if REG does not exist, true if it may do.
2035 This is for detecting regions within the stack that don't exist anymore
2036 after frames are popped. */
757bf1df 2037
808f4dfe
DM
2038bool
2039region_model::region_exists_p (const region *reg) const
757bf1df 2040{
808f4dfe
DM
2041 /* If within a stack frame, check that the stack frame is live. */
2042 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 2043 {
808f4dfe
DM
2044 /* Check that the current frame is the enclosing frame, or is called
2045 by it. */
2046 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2047 iter_frame = iter_frame->get_calling_frame ())
2048 if (iter_frame == enclosing_frame)
2049 return true;
2050 return false;
757bf1df 2051 }
808f4dfe
DM
2052
2053 return true;
757bf1df
DM
2054}
2055
808f4dfe
DM
2056/* Get a region for referencing PTR_SVAL, creating a region if need be, and
2057 potentially generating warnings via CTXT.
35e3f082 2058 PTR_SVAL must be of pointer type.
808f4dfe 2059 PTR_TREE if non-NULL can be used when emitting diagnostics. */
757bf1df 2060
808f4dfe
DM
2061const region *
2062region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
53cb324c 2063 region_model_context *ctxt) const
757bf1df 2064{
808f4dfe 2065 gcc_assert (ptr_sval);
35e3f082 2066 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
757bf1df 2067
49bfbf18
DM
2068 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2069 as a constraint. This suppresses false positives from
2070 -Wanalyzer-null-dereference for the case where we later have an
2071 if (PTR_SVAL) that would occur if we considered the false branch
2072 and transitioned the malloc state machine from start->null. */
2073 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2074 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2075 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2076
808f4dfe 2077 switch (ptr_sval->get_kind ())
757bf1df 2078 {
808f4dfe 2079 default:
23ebfda0 2080 break;
808f4dfe 2081
757bf1df
DM
2082 case SK_REGION:
2083 {
808f4dfe
DM
2084 const region_svalue *region_sval
2085 = as_a <const region_svalue *> (ptr_sval);
757bf1df
DM
2086 return region_sval->get_pointee ();
2087 }
2088
808f4dfe
DM
2089 case SK_BINOP:
2090 {
2091 const binop_svalue *binop_sval
2092 = as_a <const binop_svalue *> (ptr_sval);
2093 switch (binop_sval->get_op ())
2094 {
2095 case POINTER_PLUS_EXPR:
2096 {
2097 /* If we have a symbolic value expressing pointer arithmentic,
2098 try to convert it to a suitable region. */
2099 const region *parent_region
2100 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2101 const svalue *offset = binop_sval->get_arg1 ();
2102 tree type= TREE_TYPE (ptr_sval->get_type ());
2103 return m_mgr->get_offset_region (parent_region, type, offset);
2104 }
2105 default:
23ebfda0 2106 break;
808f4dfe
DM
2107 }
2108 }
23ebfda0 2109 break;
757bf1df
DM
2110
2111 case SK_POISONED:
2112 {
2113 if (ctxt)
808f4dfe
DM
2114 {
2115 tree ptr = get_representative_tree (ptr_sval);
2116 /* If we can't get a representative tree for PTR_SVAL
2117 (e.g. if it hasn't been bound into the store), then
2118 fall back on PTR_TREE, if non-NULL. */
2119 if (!ptr)
2120 ptr = ptr_tree;
2121 if (ptr)
2122 {
2123 const poisoned_svalue *poisoned_sval
2124 = as_a <const poisoned_svalue *> (ptr_sval);
2125 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2126 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
2127 }
2128 }
757bf1df 2129 }
23ebfda0 2130 break;
757bf1df
DM
2131 }
2132
23ebfda0 2133 return m_mgr->get_symbolic_region (ptr_sval);
757bf1df
DM
2134}
2135
d3b1ef7a
DM
2136/* Attempt to get BITS within any value of REG, as TYPE.
2137 In particular, extract values from compound_svalues for the case
2138 where there's a concrete binding at BITS.
9faf8348
DM
2139 Return an unknown svalue if we can't handle the given case.
2140 Use CTXT to report any warnings associated with reading from REG. */
d3b1ef7a
DM
2141
2142const svalue *
2143region_model::get_rvalue_for_bits (tree type,
2144 const region *reg,
9faf8348
DM
2145 const bit_range &bits,
2146 region_model_context *ctxt) const
d3b1ef7a 2147{
9faf8348 2148 const svalue *sval = get_store_value (reg, ctxt);
e61ffa20 2149 return m_mgr->get_or_create_bits_within (type, bits, sval);
d3b1ef7a
DM
2150}
2151
3175d40f
DM
2152/* A subclass of pending_diagnostic for complaining about writes to
2153 constant regions of memory. */
2154
2155class write_to_const_diagnostic
2156: public pending_diagnostic_subclass<write_to_const_diagnostic>
2157{
2158public:
2159 write_to_const_diagnostic (const region *reg, tree decl)
2160 : m_reg (reg), m_decl (decl)
2161 {}
2162
2163 const char *get_kind () const FINAL OVERRIDE
2164 {
2165 return "write_to_const_diagnostic";
2166 }
2167
2168 bool operator== (const write_to_const_diagnostic &other) const
2169 {
2170 return (m_reg == other.m_reg
2171 && m_decl == other.m_decl);
2172 }
2173
2174 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2175 {
111fd515
DM
2176 auto_diagnostic_group d;
2177 bool warned;
2178 switch (m_reg->get_kind ())
2179 {
2180 default:
2181 warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2182 "write to %<const%> object %qE", m_decl);
2183 break;
2184 case RK_FUNCTION:
2185 warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2186 "write to function %qE", m_decl);
2187 break;
2188 case RK_LABEL:
2189 warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2190 "write to label %qE", m_decl);
2191 break;
2192 }
3175d40f
DM
2193 if (warned)
2194 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2195 return warned;
2196 }
2197
2198 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2199 {
111fd515
DM
2200 switch (m_reg->get_kind ())
2201 {
2202 default:
2203 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2204 case RK_FUNCTION:
2205 return ev.formatted_print ("write to function %qE here", m_decl);
2206 case RK_LABEL:
2207 return ev.formatted_print ("write to label %qE here", m_decl);
2208 }
3175d40f
DM
2209 }
2210
2211private:
2212 const region *m_reg;
2213 tree m_decl;
2214};
2215
2216/* A subclass of pending_diagnostic for complaining about writes to
2217 string literals. */
2218
2219class write_to_string_literal_diagnostic
2220: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2221{
2222public:
2223 write_to_string_literal_diagnostic (const region *reg)
2224 : m_reg (reg)
2225 {}
2226
2227 const char *get_kind () const FINAL OVERRIDE
2228 {
2229 return "write_to_string_literal_diagnostic";
2230 }
2231
2232 bool operator== (const write_to_string_literal_diagnostic &other) const
2233 {
2234 return m_reg == other.m_reg;
2235 }
2236
2237 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2238 {
2239 return warning_at (rich_loc, OPT_Wanalyzer_write_to_string_literal,
2240 "write to string literal");
2241 /* Ideally we would show the location of the STRING_CST as well,
2242 but it is not available at this point. */
2243 }
2244
2245 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2246 {
2247 return ev.formatted_print ("write to string literal here");
2248 }
2249
2250private:
2251 const region *m_reg;
2252};
2253
2254/* Use CTXT to warn If DEST_REG is a region that shouldn't be written to. */
2255
2256void
2257region_model::check_for_writable_region (const region* dest_reg,
2258 region_model_context *ctxt) const
2259{
2260 /* Fail gracefully if CTXT is NULL. */
2261 if (!ctxt)
2262 return;
2263
2264 const region *base_reg = dest_reg->get_base_region ();
2265 switch (base_reg->get_kind ())
2266 {
2267 default:
2268 break;
111fd515
DM
2269 case RK_FUNCTION:
2270 {
2271 const function_region *func_reg = as_a <const function_region *> (base_reg);
2272 tree fndecl = func_reg->get_fndecl ();
2273 ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl));
2274 }
2275 break;
2276 case RK_LABEL:
2277 {
2278 const label_region *label_reg = as_a <const label_region *> (base_reg);
2279 tree label = label_reg->get_label ();
2280 ctxt->warn (new write_to_const_diagnostic (label_reg, label));
2281 }
2282 break;
3175d40f
DM
2283 case RK_DECL:
2284 {
2285 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2286 tree decl = decl_reg->get_decl ();
2287 /* Warn about writes to const globals.
2288 Don't warn for writes to const locals, and params in particular,
2289 since we would warn in push_frame when setting them up (e.g the
2290 "this" param is "T* const"). */
2291 if (TREE_READONLY (decl)
2292 && is_global_var (decl))
2293 ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
2294 }
2295 break;
2296 case RK_STRING:
2297 ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
2298 break;
2299 }
2300}
2301
9a2c9579
DM
2302/* Get the capacity of REG in bytes. */
2303
2304const svalue *
2305region_model::get_capacity (const region *reg) const
2306{
2307 switch (reg->get_kind ())
2308 {
2309 default:
2310 break;
2311 case RK_DECL:
2312 {
2313 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2314 tree decl = decl_reg->get_decl ();
2315 if (TREE_CODE (decl) == SSA_NAME)
2316 {
2317 tree type = TREE_TYPE (decl);
2318 tree size = TYPE_SIZE (type);
2319 return get_rvalue (size, NULL);
2320 }
2321 else
2322 {
2323 tree size = decl_init_size (decl, false);
2324 if (size)
2325 return get_rvalue (size, NULL);
2326 }
2327 }
2328 break;
e61ffa20
DM
2329 case RK_SIZED:
2330 /* Look through sized regions to get at the capacity
2331 of the underlying regions. */
2332 return get_capacity (reg->get_parent_region ());
9a2c9579
DM
2333 }
2334
2335 if (const svalue *recorded = get_dynamic_extents (reg))
2336 return recorded;
2337
2338 return m_mgr->get_or_create_unknown_svalue (sizetype);
2339}
2340
9faf8348
DM
2341/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2342 using DIR to determine if this access is a read or write. */
2343
2344void
2345region_model::check_region_access (const region *reg,
2346 enum access_direction dir,
2347 region_model_context *ctxt) const
2348{
2349 /* Fail gracefully if CTXT is NULL. */
2350 if (!ctxt)
2351 return;
2352
b9365b93
DM
2353 check_region_for_taint (reg, dir, ctxt);
2354
9faf8348
DM
2355 switch (dir)
2356 {
2357 default:
2358 gcc_unreachable ();
2359 case DIR_READ:
2360 /* Currently a no-op. */
2361 break;
2362 case DIR_WRITE:
2363 check_for_writable_region (reg, ctxt);
2364 break;
2365 }
2366}
2367
2368/* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2369
2370void
2371region_model::check_region_for_write (const region *dest_reg,
2372 region_model_context *ctxt) const
2373{
2374 check_region_access (dest_reg, DIR_WRITE, ctxt);
2375}
2376
2377/* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2378
2379void
2380region_model::check_region_for_read (const region *src_reg,
2381 region_model_context *ctxt) const
2382{
2383 check_region_access (src_reg, DIR_READ, ctxt);
2384}
2385
808f4dfe 2386/* Set the value of the region given by LHS_REG to the value given
9faf8348
DM
2387 by RHS_SVAL.
2388 Use CTXT to report any warnings associated with writing to LHS_REG. */
757bf1df 2389
808f4dfe
DM
2390void
2391region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3175d40f 2392 region_model_context *ctxt)
757bf1df 2393{
808f4dfe
DM
2394 gcc_assert (lhs_reg);
2395 gcc_assert (rhs_sval);
2396
9faf8348 2397 check_region_for_write (lhs_reg, ctxt);
3175d40f 2398
808f4dfe 2399 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
e61ffa20 2400 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
2401}
2402
808f4dfe 2403/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
2404
2405void
808f4dfe 2406region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 2407{
808f4dfe
DM
2408 const region *lhs_reg = get_lvalue (lhs, ctxt);
2409 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
2410 gcc_assert (lhs_reg);
2411 gcc_assert (rhs_sval);
2412 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
2413}
2414
808f4dfe 2415/* Remove all bindings overlapping REG within the store. */
884d9141
DM
2416
2417void
808f4dfe
DM
2418region_model::clobber_region (const region *reg)
2419{
2420 m_store.clobber_region (m_mgr->get_store_manager(), reg);
2421}
2422
2423/* Remove any bindings for REG within the store. */
2424
2425void
2426region_model::purge_region (const region *reg)
2427{
2428 m_store.purge_region (m_mgr->get_store_manager(), reg);
2429}
2430
e61ffa20
DM
2431/* Fill REG with SVAL. */
2432
2433void
2434region_model::fill_region (const region *reg, const svalue *sval)
2435{
2436 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
2437}
2438
808f4dfe
DM
2439/* Zero-fill REG. */
2440
2441void
2442region_model::zero_fill_region (const region *reg)
2443{
2444 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
2445}
2446
2447/* Mark REG as having unknown content. */
2448
2449void
3a66c289
DM
2450region_model::mark_region_as_unknown (const region *reg,
2451 uncertainty_t *uncertainty)
884d9141 2452{
3a66c289
DM
2453 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
2454 uncertainty);
884d9141
DM
2455}
2456
808f4dfe 2457/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
2458 this model. */
2459
2460tristate
808f4dfe
DM
2461region_model::eval_condition (const svalue *lhs,
2462 enum tree_code op,
2463 const svalue *rhs) const
757bf1df 2464{
e978955d
DM
2465 /* For now, make no attempt to capture constraints on floating-point
2466 values. */
2467 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2468 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2469 return tristate::unknown ();
2470
808f4dfe 2471 tristate ts = eval_condition_without_cm (lhs, op, rhs);
757bf1df
DM
2472 if (ts.is_known ())
2473 return ts;
2474
2475 /* Otherwise, try constraints. */
808f4dfe 2476 return m_constraints->eval_condition (lhs, op, rhs);
757bf1df
DM
2477}
2478
808f4dfe 2479/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
2480 this model, without resorting to the constraint_manager.
2481
2482 This is exposed so that impl_region_model_context::on_state_leak can
2483 check for equality part-way through region_model::purge_unused_svalues
2484 without risking creating new ECs. */
2485
2486tristate
808f4dfe
DM
2487region_model::eval_condition_without_cm (const svalue *lhs,
2488 enum tree_code op,
2489 const svalue *rhs) const
757bf1df 2490{
757bf1df
DM
2491 gcc_assert (lhs);
2492 gcc_assert (rhs);
2493
2494 /* See what we know based on the values. */
808f4dfe
DM
2495
2496 /* For now, make no attempt to capture constraints on floating-point
2497 values. */
2498 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2499 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2500 return tristate::unknown ();
2501
2502 /* Unwrap any unmergeable values. */
2503 lhs = lhs->unwrap_any_unmergeable ();
2504 rhs = rhs->unwrap_any_unmergeable ();
2505
2506 if (lhs == rhs)
757bf1df 2507 {
808f4dfe
DM
2508 /* If we have the same svalue, then we have equality
2509 (apart from NaN-handling).
2510 TODO: should this definitely be the case for poisoned values? */
2511 /* Poisoned and unknown values are "unknowable". */
2512 if (lhs->get_kind () == SK_POISONED
2513 || lhs->get_kind () == SK_UNKNOWN)
2514 return tristate::TS_UNKNOWN;
e978955d 2515
808f4dfe 2516 switch (op)
757bf1df 2517 {
808f4dfe
DM
2518 case EQ_EXPR:
2519 case GE_EXPR:
2520 case LE_EXPR:
2521 return tristate::TS_TRUE;
07c86323 2522
808f4dfe
DM
2523 case NE_EXPR:
2524 case GT_EXPR:
2525 case LT_EXPR:
2526 return tristate::TS_FALSE;
2527
2528 default:
2529 /* For other ops, use the logic below. */
2530 break;
757bf1df 2531 }
808f4dfe 2532 }
757bf1df 2533
808f4dfe
DM
2534 /* If we have a pair of region_svalues, compare them. */
2535 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
2536 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
2537 {
2538 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
2539 if (res.is_known ())
2540 return res;
2541 /* Otherwise, only known through constraints. */
2542 }
757bf1df 2543
808f4dfe
DM
2544 /* If we have a pair of constants, compare them. */
2545 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
2546 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
2547 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
757bf1df 2548
e82e0f14
DM
2549 /* Handle comparison against zero. */
2550 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
2551 if (zerop (cst_rhs->get_constant ()))
2552 {
2553 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
2554 {
2555 /* A region_svalue is a non-NULL pointer, except in certain
2556 special cases (see the comment for region::non_null_p). */
2557 const region *pointee = ptr->get_pointee ();
2558 if (pointee->non_null_p ())
2559 {
2560 switch (op)
2561 {
2562 default:
2563 gcc_unreachable ();
2564
2565 case EQ_EXPR:
2566 case GE_EXPR:
2567 case LE_EXPR:
2568 return tristate::TS_FALSE;
2569
2570 case NE_EXPR:
2571 case GT_EXPR:
2572 case LT_EXPR:
2573 return tristate::TS_TRUE;
2574 }
2575 }
2576 }
2577 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
2578 {
2579 /* Treat offsets from a non-NULL pointer as being non-NULL. This
2580 isn't strictly true, in that eventually ptr++ will wrap
2581 around and be NULL, but it won't occur in practise and thus
2582 can be used to suppress effectively false positives that we
2583 shouldn't warn for. */
2584 if (binop->get_op () == POINTER_PLUS_EXPR)
2585 {
2586 tristate lhs_ts
2587 = eval_condition_without_cm (binop->get_arg0 (),
2588 op, rhs);
2589 if (lhs_ts.is_known ())
2590 return lhs_ts;
2591 }
2592 }
2593 }
808f4dfe
DM
2594
2595 /* Handle rejection of equality for comparisons of the initial values of
2596 "external" values (such as params) with the address of locals. */
2597 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
2598 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
2599 {
2600 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
2601 if (res.is_known ())
2602 return res;
2603 }
2604 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
2605 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
2606 {
2607 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
2608 if (res.is_known ())
2609 return res;
2610 }
2611
2612 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
2613 if (tree rhs_cst = rhs->maybe_get_constant ())
2614 {
2615 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
2616 if (res.is_known ())
2617 return res;
2618 }
2619
2620 return tristate::TS_UNKNOWN;
2621}
2622
2623/* Subroutine of region_model::eval_condition_without_cm, for rejecting
2624 equality of INIT_VAL(PARM) with &LOCAL. */
2625
2626tristate
2627region_model::compare_initial_and_pointer (const initial_svalue *init,
2628 const region_svalue *ptr) const
2629{
2630 const region *pointee = ptr->get_pointee ();
2631
2632 /* If we have a pointer to something within a stack frame, it can't be the
2633 initial value of a param. */
2634 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
2635 if (init->initial_value_of_param_p ())
2636 return tristate::TS_FALSE;
757bf1df
DM
2637
2638 return tristate::TS_UNKNOWN;
2639}
2640
48e8a7a6
DM
2641/* Handle various constraints of the form:
2642 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
2643 OP : == or !=
2644 RHS: zero
2645 and (with a cast):
2646 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
2647 OP : == or !=
2648 RHS: zero
2649 by adding constraints for INNER_LHS INNEROP INNER_RHS.
2650
2651 Return true if this function can fully handle the constraint; if
2652 so, add the implied constraint(s) and write true to *OUT if they
2653 are consistent with existing constraints, or write false to *OUT
2654 if they contradicts existing constraints.
2655
2656 Return false for cases that this function doeesn't know how to handle.
2657
2658 For example, if we're checking a stored conditional, we'll have
2659 something like:
2660 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
2661 OP : NE_EXPR
2662 RHS: zero
2663 which this function can turn into an add_constraint of:
2664 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
2665
2666 Similarly, optimized && and || conditionals lead to e.g.
2667 if (p && q)
2668 becoming gimple like this:
2669 _1 = p_6 == 0B;
2670 _2 = q_8 == 0B
2671 _3 = _1 | _2
2672 On the "_3 is false" branch we can have constraints of the form:
2673 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2674 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
2675 == 0
2676 which implies that both _1 and _2 are false,
2677 which this function can turn into a pair of add_constraints of
2678 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2679 and:
2680 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
2681
2682bool
2683region_model::add_constraints_from_binop (const svalue *outer_lhs,
2684 enum tree_code outer_op,
2685 const svalue *outer_rhs,
2686 bool *out,
2687 region_model_context *ctxt)
2688{
2689 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
2690 outer_lhs = cast;
2691 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
2692 if (!binop_sval)
2693 return false;
2694 if (!outer_rhs->all_zeroes_p ())
2695 return false;
2696
2697 const svalue *inner_lhs = binop_sval->get_arg0 ();
2698 enum tree_code inner_op = binop_sval->get_op ();
2699 const svalue *inner_rhs = binop_sval->get_arg1 ();
2700
2701 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
2702 return false;
2703
2704 /* We have either
2705 - "OUTER_LHS != false" (i.e. OUTER is true), or
2706 - "OUTER_LHS == false" (i.e. OUTER is false). */
2707 bool is_true = outer_op == NE_EXPR;
2708
2709 switch (inner_op)
2710 {
2711 default:
2712 return false;
2713
2714 case EQ_EXPR:
2715 case NE_EXPR:
2716 {
2717 /* ...and "(inner_lhs OP inner_rhs) == 0"
2718 then (inner_lhs OP inner_rhs) must have the same
2719 logical value as LHS. */
2720 if (!is_true)
2721 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
2722 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
2723 return true;
2724 }
2725 break;
2726
2727 case BIT_AND_EXPR:
2728 if (is_true)
2729 {
2730 /* ...and "(inner_lhs & inner_rhs) != 0"
2731 then both inner_lhs and inner_rhs must be true. */
2732 const svalue *false_sval
2733 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2734 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
2735 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
2736 *out = sat1 && sat2;
2737 return true;
2738 }
2739 return false;
2740
2741 case BIT_IOR_EXPR:
2742 if (!is_true)
2743 {
2744 /* ...and "(inner_lhs | inner_rhs) == 0"
2745 i.e. "(inner_lhs | inner_rhs)" is false
2746 then both inner_lhs and inner_rhs must be false. */
2747 const svalue *false_sval
2748 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2749 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
2750 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
2751 *out = sat1 && sat2;
2752 return true;
2753 }
2754 return false;
2755 }
2756}
2757
757bf1df
DM
2758/* Attempt to add the constraint "LHS OP RHS" to this region_model.
2759 If it is consistent with existing constraints, add it, and return true.
2760 Return false if it contradicts existing constraints.
2761 Use CTXT for reporting any diagnostics associated with the accesses. */
2762
2763bool
2764region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
2765 region_model_context *ctxt)
2766{
e978955d
DM
2767 /* For now, make no attempt to capture constraints on floating-point
2768 values. */
2769 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
2770 return true;
2771
808f4dfe
DM
2772 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
2773 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 2774
48e8a7a6
DM
2775 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
2776}
2777
2778/* Attempt to add the constraint "LHS OP RHS" to this region_model.
2779 If it is consistent with existing constraints, add it, and return true.
2780 Return false if it contradicts existing constraints.
2781 Use CTXT for reporting any diagnostics associated with the accesses. */
2782
2783bool
2784region_model::add_constraint (const svalue *lhs,
2785 enum tree_code op,
2786 const svalue *rhs,
2787 region_model_context *ctxt)
2788{
2789 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
2790
2791 /* If we already have the condition, do nothing. */
2792 if (t_cond.is_true ())
2793 return true;
2794
2795 /* Reject a constraint that would contradict existing knowledge, as
2796 unsatisfiable. */
2797 if (t_cond.is_false ())
2798 return false;
2799
48e8a7a6
DM
2800 bool out;
2801 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
2802 return out;
757bf1df 2803
48e8a7a6
DM
2804 /* Store the constraint. */
2805 m_constraints->add_constraint (lhs, op, rhs);
757bf1df
DM
2806
2807 /* Notify the context, if any. This exists so that the state machines
2808 in a program_state can be notified about the condition, and so can
2809 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
2810 when synthesizing constraints as above. */
2811 if (ctxt)
2812 ctxt->on_condition (lhs, op, rhs);
2813
9a2c9579
DM
2814 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
2815 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
2816 if (tree rhs_cst = rhs->maybe_get_constant ())
2817 if (op == EQ_EXPR && zerop (rhs_cst))
2818 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
2819 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 2820
757bf1df
DM
2821 return true;
2822}
2823
84fb3546
DM
2824/* As above, but when returning false, if OUT is non-NULL, write a
2825 new rejected_constraint to *OUT. */
2826
2827bool
2828region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
2829 region_model_context *ctxt,
2830 rejected_constraint **out)
2831{
2832 bool sat = add_constraint (lhs, op, rhs, ctxt);
2833 if (!sat && out)
8ca7fa84 2834 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
2835 return sat;
2836}
2837
757bf1df
DM
2838/* Determine what is known about the condition "LHS OP RHS" within
2839 this model.
2840 Use CTXT for reporting any diagnostics associated with the accesses. */
2841
2842tristate
2843region_model::eval_condition (tree lhs,
2844 enum tree_code op,
2845 tree rhs,
2846 region_model_context *ctxt)
2847{
e978955d
DM
2848 /* For now, make no attempt to model constraints on floating-point
2849 values. */
2850 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
2851 return tristate::unknown ();
2852
757bf1df
DM
2853 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
2854}
2855
467a4820
DM
2856/* Implementation of region_model::get_representative_path_var.
2857 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
2858 Use VISITED to prevent infinite mutual recursion with the overload for
2859 regions. */
757bf1df 2860
808f4dfe 2861path_var
467a4820
DM
2862region_model::get_representative_path_var_1 (const svalue *sval,
2863 svalue_set *visited) const
757bf1df 2864{
467a4820 2865 gcc_assert (sval);
757bf1df 2866
808f4dfe
DM
2867 /* Prevent infinite recursion. */
2868 if (visited->contains (sval))
2869 return path_var (NULL_TREE, 0);
2870 visited->add (sval);
757bf1df 2871
467a4820
DM
2872 /* Handle casts by recursion into get_representative_path_var. */
2873 if (const svalue *cast_sval = sval->maybe_undo_cast ())
2874 {
2875 path_var result = get_representative_path_var (cast_sval, visited);
2876 tree orig_type = sval->get_type ();
2877 /* If necessary, wrap the result in a cast. */
2878 if (result.m_tree && orig_type)
2879 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
2880 return result;
2881 }
2882
808f4dfe
DM
2883 auto_vec<path_var> pvs;
2884 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 2885
808f4dfe
DM
2886 if (tree cst = sval->maybe_get_constant ())
2887 pvs.safe_push (path_var (cst, 0));
757bf1df 2888
90f7c300 2889 /* Handle string literals and various other pointers. */
808f4dfe
DM
2890 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
2891 {
2892 const region *reg = ptr_sval->get_pointee ();
2893 if (path_var pv = get_representative_path_var (reg, visited))
2894 return path_var (build1 (ADDR_EXPR,
467a4820 2895 sval->get_type (),
808f4dfe
DM
2896 pv.m_tree),
2897 pv.m_stack_depth);
2898 }
2899
2900 /* If we have a sub_svalue, look for ways to represent the parent. */
2901 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 2902 {
808f4dfe
DM
2903 const svalue *parent_sval = sub_sval->get_parent ();
2904 const region *subreg = sub_sval->get_subregion ();
2905 if (path_var parent_pv
2906 = get_representative_path_var (parent_sval, visited))
2907 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
2908 return path_var (build3 (COMPONENT_REF,
2909 sval->get_type (),
2910 parent_pv.m_tree,
2911 field_reg->get_field (),
2912 NULL_TREE),
2913 parent_pv.m_stack_depth);
90f7c300
DM
2914 }
2915
b9365b93
DM
2916 /* Handle binops. */
2917 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
2918 if (path_var lhs_pv
2919 = get_representative_path_var (binop_sval->get_arg0 (), visited))
2920 if (path_var rhs_pv
2921 = get_representative_path_var (binop_sval->get_arg1 (), visited))
2922 return path_var (build2 (binop_sval->get_op (),
2923 sval->get_type (),
2924 lhs_pv.m_tree, rhs_pv.m_tree),
2925 lhs_pv.m_stack_depth);
2926
808f4dfe
DM
2927 if (pvs.length () < 1)
2928 return path_var (NULL_TREE, 0);
2929
2930 pvs.qsort (readability_comparator);
2931 return pvs[0];
757bf1df
DM
2932}
2933
467a4820
DM
2934/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
2935 Use VISITED to prevent infinite mutual recursion with the overload for
2936 regions
2937
2938 This function defers to get_representative_path_var_1 to do the work;
2939 it adds verification that get_representative_path_var_1 returned a tree
2940 of the correct type. */
2941
2942path_var
2943region_model::get_representative_path_var (const svalue *sval,
2944 svalue_set *visited) const
2945{
2946 if (sval == NULL)
2947 return path_var (NULL_TREE, 0);
2948
2949 tree orig_type = sval->get_type ();
2950
2951 path_var result = get_representative_path_var_1 (sval, visited);
2952
2953 /* Verify that the result has the same type as SVAL, if any. */
2954 if (result.m_tree && orig_type)
2955 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
2956
2957 return result;
2958}
2959
2960/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
2961
2962 Strip off any top-level cast, to avoid messages like
2963 double-free of '(void *)ptr'
2964 from analyzer diagnostics. */
757bf1df 2965
808f4dfe
DM
2966tree
2967region_model::get_representative_tree (const svalue *sval) const
757bf1df 2968{
808f4dfe 2969 svalue_set visited;
467a4820
DM
2970 tree expr = get_representative_path_var (sval, &visited).m_tree;
2971
2972 /* Strip off any top-level cast. */
2973 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 2974 expr = TREE_OPERAND (expr, 0);
467a4820 2975
e4bb1bd6 2976 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
2977}
2978
467a4820
DM
2979/* Implementation of region_model::get_representative_path_var.
2980
2981 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
2982 the NULL path_var.
2983 For example, a region for a field of a local would be a path_var
2984 wrapping a COMPONENT_REF.
2985 Use VISITED to prevent infinite mutual recursion with the overload for
2986 svalues. */
757bf1df 2987
808f4dfe 2988path_var
467a4820
DM
2989region_model::get_representative_path_var_1 (const region *reg,
2990 svalue_set *visited) const
808f4dfe
DM
2991{
2992 switch (reg->get_kind ())
757bf1df 2993 {
808f4dfe
DM
2994 default:
2995 gcc_unreachable ();
e516294a 2996
808f4dfe
DM
2997 case RK_FRAME:
2998 case RK_GLOBALS:
2999 case RK_CODE:
3000 case RK_HEAP:
3001 case RK_STACK:
3002 case RK_ROOT:
3003 /* Regions that represent memory spaces are not expressible as trees. */
3004 return path_var (NULL_TREE, 0);
757bf1df 3005
808f4dfe 3006 case RK_FUNCTION:
884d9141 3007 {
808f4dfe
DM
3008 const function_region *function_reg
3009 = as_a <const function_region *> (reg);
3010 return path_var (function_reg->get_fndecl (), 0);
884d9141 3011 }
808f4dfe 3012 case RK_LABEL:
9e78634c
DM
3013 {
3014 const label_region *label_reg = as_a <const label_region *> (reg);
3015 return path_var (label_reg->get_label (), 0);
3016 }
90f7c300 3017
808f4dfe
DM
3018 case RK_SYMBOLIC:
3019 {
3020 const symbolic_region *symbolic_reg
3021 = as_a <const symbolic_region *> (reg);
3022 const svalue *pointer = symbolic_reg->get_pointer ();
3023 path_var pointer_pv = get_representative_path_var (pointer, visited);
3024 if (!pointer_pv)
3025 return path_var (NULL_TREE, 0);
3026 tree offset = build_int_cst (pointer->get_type (), 0);
3027 return path_var (build2 (MEM_REF,
3028 reg->get_type (),
3029 pointer_pv.m_tree,
3030 offset),
3031 pointer_pv.m_stack_depth);
3032 }
3033 case RK_DECL:
3034 {
3035 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3036 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3037 }
3038 case RK_FIELD:
3039 {
3040 const field_region *field_reg = as_a <const field_region *> (reg);
3041 path_var parent_pv
3042 = get_representative_path_var (reg->get_parent_region (), visited);
3043 if (!parent_pv)
3044 return path_var (NULL_TREE, 0);
3045 return path_var (build3 (COMPONENT_REF,
3046 reg->get_type (),
3047 parent_pv.m_tree,
3048 field_reg->get_field (),
3049 NULL_TREE),
3050 parent_pv.m_stack_depth);
3051 }
757bf1df 3052
808f4dfe
DM
3053 case RK_ELEMENT:
3054 {
3055 const element_region *element_reg
3056 = as_a <const element_region *> (reg);
3057 path_var parent_pv
3058 = get_representative_path_var (reg->get_parent_region (), visited);
3059 if (!parent_pv)
3060 return path_var (NULL_TREE, 0);
3061 path_var index_pv
3062 = get_representative_path_var (element_reg->get_index (), visited);
3063 if (!index_pv)
3064 return path_var (NULL_TREE, 0);
3065 return path_var (build4 (ARRAY_REF,
3066 reg->get_type (),
3067 parent_pv.m_tree, index_pv.m_tree,
3068 NULL_TREE, NULL_TREE),
3069 parent_pv.m_stack_depth);
3070 }
757bf1df 3071
808f4dfe 3072 case RK_OFFSET:
757bf1df 3073 {
808f4dfe
DM
3074 const offset_region *offset_reg
3075 = as_a <const offset_region *> (reg);
3076 path_var parent_pv
3077 = get_representative_path_var (reg->get_parent_region (), visited);
3078 if (!parent_pv)
3079 return path_var (NULL_TREE, 0);
3080 path_var offset_pv
3081 = get_representative_path_var (offset_reg->get_byte_offset (),
3082 visited);
29f5db8e 3083 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 3084 return path_var (NULL_TREE, 0);
29f5db8e
DM
3085 tree addr_parent = build1 (ADDR_EXPR,
3086 build_pointer_type (reg->get_type ()),
3087 parent_pv.m_tree);
808f4dfe
DM
3088 return path_var (build2 (MEM_REF,
3089 reg->get_type (),
29f5db8e 3090 addr_parent, offset_pv.m_tree),
808f4dfe 3091 parent_pv.m_stack_depth);
757bf1df 3092 }
757bf1df 3093
e61ffa20
DM
3094 case RK_SIZED:
3095 return path_var (NULL_TREE, 0);
3096
808f4dfe
DM
3097 case RK_CAST:
3098 {
3099 path_var parent_pv
3100 = get_representative_path_var (reg->get_parent_region (), visited);
3101 if (!parent_pv)
3102 return path_var (NULL_TREE, 0);
3103 return path_var (build1 (NOP_EXPR,
3104 reg->get_type (),
3105 parent_pv.m_tree),
3106 parent_pv.m_stack_depth);
3107 }
757bf1df 3108
808f4dfe
DM
3109 case RK_HEAP_ALLOCATED:
3110 case RK_ALLOCA:
3111 /* No good way to express heap-allocated/alloca regions as trees. */
3112 return path_var (NULL_TREE, 0);
757bf1df 3113
808f4dfe
DM
3114 case RK_STRING:
3115 {
3116 const string_region *string_reg = as_a <const string_region *> (reg);
3117 return path_var (string_reg->get_string_cst (), 0);
3118 }
757bf1df 3119
808f4dfe
DM
3120 case RK_UNKNOWN:
3121 return path_var (NULL_TREE, 0);
3122 }
757bf1df
DM
3123}
3124
467a4820
DM
3125/* Attempt to return a path_var that represents REG, or return
3126 the NULL path_var.
3127 For example, a region for a field of a local would be a path_var
3128 wrapping a COMPONENT_REF.
3129 Use VISITED to prevent infinite mutual recursion with the overload for
3130 svalues.
3131
3132 This function defers to get_representative_path_var_1 to do the work;
3133 it adds verification that get_representative_path_var_1 returned a tree
3134 of the correct type. */
3135
3136path_var
3137region_model::get_representative_path_var (const region *reg,
3138 svalue_set *visited) const
3139{
3140 path_var result = get_representative_path_var_1 (reg, visited);
3141
3142 /* Verify that the result has the same type as REG, if any. */
3143 if (result.m_tree && reg->get_type ())
3144 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
3145
3146 return result;
3147}
3148
757bf1df
DM
3149/* Update this model for any phis in SNODE, assuming we came from
3150 LAST_CFG_SUPEREDGE. */
3151
3152void
3153region_model::update_for_phis (const supernode *snode,
3154 const cfg_superedge *last_cfg_superedge,
3155 region_model_context *ctxt)
3156{
3157 gcc_assert (last_cfg_superedge);
3158
e0a7a675
DM
3159 /* Copy this state and pass it to handle_phi so that all of the phi stmts
3160 are effectively handled simultaneously. */
3161 const region_model old_state (*this);
3162
757bf1df
DM
3163 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
3164 !gsi_end_p (gpi); gsi_next (&gpi))
3165 {
3166 gphi *phi = gpi.phi ();
3167
3168 tree src = last_cfg_superedge->get_phi_arg (phi);
3169 tree lhs = gimple_phi_result (phi);
3170
e0a7a675
DM
3171 /* Update next_state based on phi and old_state. */
3172 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
3173 }
3174}
3175
3176/* Attempt to update this model for taking EDGE (where the last statement
3177 was LAST_STMT), returning true if the edge can be taken, false
3178 otherwise.
84fb3546
DM
3179 When returning false, if OUT is non-NULL, write a new rejected_constraint
3180 to it.
757bf1df
DM
3181
3182 For CFG superedges where LAST_STMT is a conditional or a switch
3183 statement, attempt to add the relevant conditions for EDGE to this
3184 model, returning true if they are feasible, or false if they are
3185 impossible.
3186
3187 For call superedges, push frame information and store arguments
3188 into parameters.
3189
3190 For return superedges, pop frame information and store return
3191 values into any lhs.
3192
3193 Rejection of call/return superedges happens elsewhere, in
3194 program_point::on_edge (i.e. based on program point, rather
3195 than program state). */
3196
3197bool
3198region_model::maybe_update_for_edge (const superedge &edge,
3199 const gimple *last_stmt,
84fb3546
DM
3200 region_model_context *ctxt,
3201 rejected_constraint **out)
757bf1df
DM
3202{
3203 /* Handle frame updates for interprocedural edges. */
3204 switch (edge.m_kind)
3205 {
3206 default:
3207 break;
3208
3209 case SUPEREDGE_CALL:
3210 {
3211 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
3212 update_for_call_superedge (*call_edge, ctxt);
3213 }
3214 break;
3215
3216 case SUPEREDGE_RETURN:
3217 {
3218 const return_superedge *return_edge
3219 = as_a <const return_superedge *> (&edge);
3220 update_for_return_superedge (*return_edge, ctxt);
3221 }
3222 break;
3223
3224 case SUPEREDGE_INTRAPROCEDURAL_CALL:
3225 {
3226 const callgraph_superedge *cg_sedge
3227 = as_a <const callgraph_superedge *> (&edge);
3228 update_for_call_summary (*cg_sedge, ctxt);
3229 }
3230 break;
3231 }
3232
3233 if (last_stmt == NULL)
3234 return true;
3235
3236 /* Apply any constraints for conditionals/switch statements. */
3237
3238 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
3239 {
3240 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 3241 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
3242 }
3243
3244 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
3245 {
3246 const switch_cfg_superedge *switch_sedge
3247 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
3248 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
3249 ctxt, out);
757bf1df
DM
3250 }
3251
1690a839
DM
3252 /* Apply any constraints due to an exception being thrown. */
3253 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
3254 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 3255 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 3256
757bf1df
DM
3257 return true;
3258}
3259
3260/* Push a new frame_region on to the stack region.
3261 Populate the frame_region with child regions for the function call's
3262 parameters, using values from the arguments at the callsite in the
3263 caller's frame. */
3264
3265void
aef703cf 3266region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
3267 region_model_context *ctxt,
3268 function *callee)
757bf1df 3269{
808f4dfe 3270 /* Build a vec of argument svalues, using the current top
757bf1df 3271 frame for resolving tree expressions. */
808f4dfe 3272 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
3273
3274 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
3275 {
3276 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 3277 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
3278 }
3279
e92d0ff6
AS
3280 if(!callee)
3281 {
3282 /* Get the function * from the gcall. */
3283 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
3284 callee = DECL_STRUCT_FUNCTION (fn_decl);
3285 }
3286
3287 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
3288}
3289
a96f1c38
DM
3290/* Pop the top-most frame_region from the stack, and copy the return
3291 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 3292 the call (if any). */
aef703cf 3293
757bf1df 3294void
aef703cf
AS
3295region_model::update_for_return_gcall (const gcall *call_stmt,
3296 region_model_context *ctxt)
757bf1df 3297{
a96f1c38 3298 /* Get the region for the result of the call, within the caller frame. */
808f4dfe 3299 const region *result_dst_reg = NULL;
757bf1df
DM
3300 tree lhs = gimple_call_lhs (call_stmt);
3301 if (lhs)
a96f1c38
DM
3302 {
3303 /* Normally we access the top-level frame, which is:
aef703cf
AS
3304 path_var (expr, get_stack_depth () - 1)
3305 whereas here we need the caller frame, hence "- 2" here. */
808f4dfe
DM
3306 gcc_assert (get_stack_depth () >= 2);
3307 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
aef703cf 3308 ctxt);
a96f1c38
DM
3309 }
3310
808f4dfe 3311 pop_frame (result_dst_reg, NULL, ctxt);
757bf1df
DM
3312}
3313
aef703cf
AS
3314/* Extract calling information from the superedge and update the model for the
3315 call */
3316
3317void
3318region_model::update_for_call_superedge (const call_superedge &call_edge,
3319 region_model_context *ctxt)
3320{
3321 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 3322 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
3323}
3324
3325/* Extract calling information from the return superedge and update the model
3326 for the returning call */
3327
3328void
3329region_model::update_for_return_superedge (const return_superedge &return_edge,
3330 region_model_context *ctxt)
3331{
3332 const gcall *call_stmt = return_edge.get_call_stmt ();
3333 update_for_return_gcall (call_stmt, ctxt);
3334}
3335
757bf1df
DM
3336/* Update this region_model with a summary of the effect of calling
3337 and returning from CG_SEDGE.
3338
3339 TODO: Currently this is extremely simplistic: we merely set the
3340 return value to "unknown". A proper implementation would e.g. update
3341 sm-state, and presumably be reworked to support multiple outcomes. */
3342
3343void
3344region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
3345 region_model_context *ctxt)
3346{
3347 /* For now, set any return value to "unknown". */
3348 const gcall *call_stmt = cg_sedge.get_call_stmt ();
3349 tree lhs = gimple_call_lhs (call_stmt);
3350 if (lhs)
3a66c289
DM
3351 mark_region_as_unknown (get_lvalue (lhs, ctxt),
3352 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
3353
3354 // TODO: actually implement some kind of summary here
3355}
3356
3357/* Given a true or false edge guarded by conditional statement COND_STMT,
3358 determine appropriate constraints for the edge to be taken.
3359
3360 If they are feasible, add the constraints and return true.
3361
3362 Return false if the constraints contradict existing knowledge
84fb3546
DM
3363 (and so the edge should not be taken).
3364 When returning false, if OUT is non-NULL, write a new rejected_constraint
3365 to it. */
757bf1df
DM
3366
3367bool
3368region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
3369 const gcond *cond_stmt,
84fb3546
DM
3370 region_model_context *ctxt,
3371 rejected_constraint **out)
757bf1df
DM
3372{
3373 ::edge cfg_edge = sedge.get_cfg_edge ();
3374 gcc_assert (cfg_edge != NULL);
3375 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
3376
3377 enum tree_code op = gimple_cond_code (cond_stmt);
3378 tree lhs = gimple_cond_lhs (cond_stmt);
3379 tree rhs = gimple_cond_rhs (cond_stmt);
3380 if (cfg_edge->flags & EDGE_FALSE_VALUE)
3381 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 3382 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
3383}
3384
3385/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
3386 for the edge to be taken.
3387
3388 If they are feasible, add the constraints and return true.
3389
3390 Return false if the constraints contradict existing knowledge
84fb3546
DM
3391 (and so the edge should not be taken).
3392 When returning false, if OUT is non-NULL, write a new rejected_constraint
3393 to it. */
757bf1df
DM
3394
3395bool
3396region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
3397 const gswitch *switch_stmt,
84fb3546
DM
3398 region_model_context *ctxt,
3399 rejected_constraint **out)
757bf1df 3400{
8ca7fa84
DM
3401 bounded_ranges_manager *ranges_mgr = get_range_manager ();
3402 const bounded_ranges *all_cases_ranges
3403 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
757bf1df 3404 tree index = gimple_switch_index (switch_stmt);
8ca7fa84
DM
3405 const svalue *index_sval = get_rvalue (index, ctxt);
3406 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
3407 if (!sat && out)
3408 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
3409 return sat;
757bf1df
DM
3410}
3411
1690a839
DM
3412/* Apply any constraints due to an exception being thrown at LAST_STMT.
3413
3414 If they are feasible, add the constraints and return true.
3415
3416 Return false if the constraints contradict existing knowledge
84fb3546
DM
3417 (and so the edge should not be taken).
3418 When returning false, if OUT is non-NULL, write a new rejected_constraint
3419 to it. */
1690a839
DM
3420
3421bool
3422region_model::apply_constraints_for_exception (const gimple *last_stmt,
84fb3546
DM
3423 region_model_context *ctxt,
3424 rejected_constraint **out)
1690a839
DM
3425{
3426 gcc_assert (last_stmt);
3427 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
3428 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
3429 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
3430 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
3431 {
3432 /* We have an exception thrown from operator new.
3433 Add a constraint that the result was NULL, to avoid a false
3434 leak report due to the result being lost when following
3435 the EH edge. */
3436 if (tree lhs = gimple_call_lhs (call))
84fb3546 3437 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
3438 return true;
3439 }
3440 return true;
3441}
3442
808f4dfe
DM
3443/* For use with push_frame when handling a top-level call within the analysis.
3444 PARAM has a defined but unknown initial value.
3445 Anything it points to has escaped, since the calling context "knows"
3446 the pointer, and thus calls to unknown functions could read/write into
3447 the region. */
757bf1df
DM
3448
3449void
808f4dfe 3450region_model::on_top_level_param (tree param,
3a25f345 3451 region_model_context *ctxt)
757bf1df 3452{
808f4dfe 3453 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 3454 {
808f4dfe
DM
3455 const region *param_reg = get_lvalue (param, ctxt);
3456 const svalue *init_ptr_sval
3457 = m_mgr->get_or_create_initial_value (param_reg);
3458 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
3459 m_store.mark_as_escaped (pointee_reg);
5eae0ac7 3460 }
757bf1df
DM
3461}
3462
808f4dfe
DM
3463/* Update this region_model to reflect pushing a frame onto the stack
3464 for a call to FUN.
757bf1df 3465
808f4dfe
DM
3466 If ARG_SVALS is non-NULL, use it to populate the parameters
3467 in the new frame.
3468 Otherwise, the params have their initial_svalues.
757bf1df 3469
808f4dfe 3470 Return the frame_region for the new frame. */
757bf1df 3471
808f4dfe
DM
3472const region *
3473region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
3474 region_model_context *ctxt)
757bf1df 3475{
808f4dfe
DM
3476 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
3477 if (arg_svals)
757bf1df 3478 {
808f4dfe
DM
3479 /* Arguments supplied from a caller frame. */
3480 tree fndecl = fun->decl;
3481 unsigned idx = 0;
3482 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3483 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 3484 {
808f4dfe
DM
3485 /* If there's a mismatching declaration, the call stmt might
3486 not have enough args. Handle this case by leaving the
3487 rest of the params as uninitialized. */
3488 if (idx >= arg_svals->length ())
3489 break;
294b6da2
DM
3490 tree parm_lval = iter_parm;
3491 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
3492 parm_lval = parm_default_ssa;
3493 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 3494 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 3495 set_value (parm_reg, arg_sval, ctxt);
757bf1df 3496 }
757bf1df 3497 }
808f4dfe 3498 else
757bf1df 3499 {
808f4dfe
DM
3500 /* Otherwise we have a top-level call within the analysis. The params
3501 have defined but unknown initial values.
3502 Anything they point to has escaped. */
3503 tree fndecl = fun->decl;
3504 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3505 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 3506 {
294b6da2 3507 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
808f4dfe 3508 on_top_level_param (parm_default_ssa, ctxt);
294b6da2
DM
3509 else
3510 on_top_level_param (iter_parm, ctxt);
757bf1df
DM
3511 }
3512 }
757bf1df 3513
808f4dfe 3514 return m_current_frame;
757bf1df
DM
3515}
3516
808f4dfe
DM
3517/* Get the function of the top-most frame in this region_model's stack.
3518 There must be such a frame. */
757bf1df 3519
808f4dfe
DM
3520function *
3521region_model::get_current_function () const
757bf1df 3522{
808f4dfe
DM
3523 const frame_region *frame = get_current_frame ();
3524 gcc_assert (frame);
3525 return frame->get_function ();
757bf1df
DM
3526}
3527
808f4dfe 3528/* Pop the topmost frame_region from this region_model's stack;
757bf1df 3529
808f4dfe
DM
3530 If RESULT_DST_REG is non-null, copy any return value from the frame
3531 into RESULT_DST_REG's region.
3532 If OUT_RESULT is non-null, copy any return value from the frame
3533 into *OUT_RESULT.
757bf1df 3534
808f4dfe
DM
3535 Purge the frame region and all its descendent regions.
3536 Convert any pointers that point into such regions into
3537 POISON_KIND_POPPED_STACK svalues. */
757bf1df 3538
808f4dfe
DM
3539void
3540region_model::pop_frame (const region *result_dst_reg,
3541 const svalue **out_result,
3542 region_model_context *ctxt)
3543{
3544 gcc_assert (m_current_frame);
757bf1df 3545
808f4dfe
DM
3546 /* Evaluate the result, within the callee frame. */
3547 const frame_region *frame_reg = m_current_frame;
3548 tree fndecl = m_current_frame->get_function ()->decl;
3549 tree result = DECL_RESULT (fndecl);
3550 if (result && TREE_TYPE (result) != void_type_node)
3551 {
3552 if (result_dst_reg)
3553 {
3554 /* Copy the result to RESULT_DST_REG. */
3555 copy_region (result_dst_reg,
3556 get_lvalue (result, ctxt),
3557 ctxt);
3558 }
3559 if (out_result)
3560 *out_result = get_rvalue (result, ctxt);
3561 }
757bf1df 3562
808f4dfe
DM
3563 /* Pop the frame. */
3564 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 3565
808f4dfe 3566 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
3567}
3568
808f4dfe 3569/* Get the number of frames in this region_model's stack. */
757bf1df 3570
808f4dfe
DM
3571int
3572region_model::get_stack_depth () const
757bf1df 3573{
808f4dfe
DM
3574 const frame_region *frame = get_current_frame ();
3575 if (frame)
3576 return frame->get_stack_depth ();
3577 else
3578 return 0;
757bf1df
DM
3579}
3580
808f4dfe
DM
3581/* Get the frame_region with the given index within the stack.
3582 The frame_region must exist. */
757bf1df 3583
808f4dfe
DM
3584const frame_region *
3585region_model::get_frame_at_index (int index) const
757bf1df 3586{
808f4dfe
DM
3587 const frame_region *frame = get_current_frame ();
3588 gcc_assert (frame);
3589 gcc_assert (index >= 0);
3590 gcc_assert (index <= frame->get_index ());
3591 while (index != frame->get_index ())
3592 {
3593 frame = frame->get_calling_frame ();
3594 gcc_assert (frame);
3595 }
3596 return frame;
757bf1df
DM
3597}
3598
808f4dfe
DM
3599/* Unbind svalues for any regions in REG and below.
3600 Find any pointers to such regions; convert them to
9a2c9579
DM
3601 poisoned values of kind PKIND.
3602 Also purge any dynamic extents. */
757bf1df 3603
808f4dfe
DM
3604void
3605region_model::unbind_region_and_descendents (const region *reg,
3606 enum poison_kind pkind)
757bf1df 3607{
808f4dfe
DM
3608 /* Gather a set of base regions to be unbound. */
3609 hash_set<const region *> base_regs;
3610 for (store::cluster_map_t::iterator iter = m_store.begin ();
3611 iter != m_store.end (); ++iter)
757bf1df 3612 {
808f4dfe
DM
3613 const region *iter_base_reg = (*iter).first;
3614 if (iter_base_reg->descendent_of_p (reg))
3615 base_regs.add (iter_base_reg);
757bf1df 3616 }
808f4dfe
DM
3617 for (hash_set<const region *>::iterator iter = base_regs.begin ();
3618 iter != base_regs.end (); ++iter)
3619 m_store.purge_cluster (*iter);
757bf1df 3620
808f4dfe
DM
3621 /* Find any pointers to REG or its descendents; convert to poisoned. */
3622 poison_any_pointers_to_descendents (reg, pkind);
9a2c9579
DM
3623
3624 /* Purge dynamic extents of any base regions in REG and below
3625 (e.g. VLAs and alloca stack regions). */
3626 for (auto iter : m_dynamic_extents)
3627 {
3628 const region *iter_reg = iter.first;
3629 if (iter_reg->descendent_of_p (reg))
3630 unset_dynamic_extents (iter_reg);
3631 }
757bf1df
DM
3632}
3633
808f4dfe
DM
3634/* Implementation of BindingVisitor.
3635 Update the bound svalues for regions below REG to use poisoned
3636 values instead. */
757bf1df 3637
808f4dfe 3638struct bad_pointer_finder
757bf1df 3639{
808f4dfe
DM
3640 bad_pointer_finder (const region *reg, enum poison_kind pkind,
3641 region_model_manager *mgr)
3642 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
3643 {}
757bf1df 3644
808f4dfe
DM
3645 void on_binding (const binding_key *, const svalue *&sval)
3646 {
3647 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3648 {
3649 const region *ptr_dst = ptr_sval->get_pointee ();
3650 /* Poison ptrs to descendents of REG, but not to REG itself,
3651 otherwise double-free detection doesn't work (since sm-state
3652 for "free" is stored on the original ptr svalue). */
3653 if (ptr_dst->descendent_of_p (m_reg)
3654 && ptr_dst != m_reg)
3655 {
3656 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
3657 sval->get_type ());
3658 ++m_count;
3659 }
3660 }
3661 }
757bf1df 3662
808f4dfe
DM
3663 const region *m_reg;
3664 enum poison_kind m_pkind;
3665 region_model_manager *const m_mgr;
3666 int m_count;
3667};
757bf1df 3668
808f4dfe
DM
3669/* Find any pointers to REG or its descendents; convert them to
3670 poisoned values of kind PKIND.
3671 Return the number of pointers that were poisoned. */
757bf1df 3672
808f4dfe
DM
3673int
3674region_model::poison_any_pointers_to_descendents (const region *reg,
3675 enum poison_kind pkind)
3676{
3677 bad_pointer_finder bv (reg, pkind, m_mgr);
3678 m_store.for_each_binding (bv);
3679 return bv.m_count;
757bf1df
DM
3680}
3681
808f4dfe
DM
3682/* Attempt to merge THIS with OTHER_MODEL, writing the result
3683 to OUT_MODEL. Use POINT to distinguish values created as a
3684 result of merging. */
757bf1df 3685
808f4dfe
DM
3686bool
3687region_model::can_merge_with_p (const region_model &other_model,
3688 const program_point &point,
f573d351
DM
3689 region_model *out_model,
3690 const extrinsic_state *ext_state,
3691 const program_state *state_a,
3692 const program_state *state_b) const
757bf1df 3693{
808f4dfe
DM
3694 gcc_assert (out_model);
3695 gcc_assert (m_mgr == other_model.m_mgr);
3696 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 3697
808f4dfe
DM
3698 if (m_current_frame != other_model.m_current_frame)
3699 return false;
3700 out_model->m_current_frame = m_current_frame;
757bf1df 3701
f573d351
DM
3702 model_merger m (this, &other_model, point, out_model,
3703 ext_state, state_a, state_b);
757bf1df 3704
808f4dfe
DM
3705 if (!store::can_merge_p (&m_store, &other_model.m_store,
3706 &out_model->m_store, m_mgr->get_store_manager (),
3707 &m))
3708 return false;
3709
9a2c9579
DM
3710 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
3711 &out_model->m_dynamic_extents))
3712 return false;
3713
808f4dfe
DM
3714 /* Merge constraints. */
3715 constraint_manager::merge (*m_constraints,
3716 *other_model.m_constraints,
c710051a 3717 out_model->m_constraints);
757bf1df 3718
808f4dfe 3719 return true;
757bf1df
DM
3720}
3721
3722/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
3723 otherwise. */
3724
3725tree
3726region_model::get_fndecl_for_call (const gcall *call,
3727 region_model_context *ctxt)
3728{
3729 tree fn_ptr = gimple_call_fn (call);
3730 if (fn_ptr == NULL_TREE)
3731 return NULL_TREE;
808f4dfe
DM
3732 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
3733 if (const region_svalue *fn_ptr_ptr
3734 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 3735 {
808f4dfe
DM
3736 const region *reg = fn_ptr_ptr->get_pointee ();
3737 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 3738 {
808f4dfe 3739 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
3740 cgraph_node *node = cgraph_node::get (fn_decl);
3741 if (!node)
3742 return NULL_TREE;
3743 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
3744 if (ultimate_node)
3745 return ultimate_node->decl;
757bf1df
DM
3746 }
3747 }
3748
3749 return NULL_TREE;
3750}
3751
808f4dfe 3752/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 3753
808f4dfe 3754struct append_ssa_names_cb_data
757bf1df 3755{
808f4dfe
DM
3756 const region_model *model;
3757 auto_vec<const decl_region *> *out;
3758};
757bf1df 3759
808f4dfe
DM
3760/* Populate *OUT with all decl_regions for SSA names in the current
3761 frame that have clusters within the store. */
757bf1df
DM
3762
3763void
808f4dfe
DM
3764region_model::
3765get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
3766 const
757bf1df 3767{
808f4dfe
DM
3768 append_ssa_names_cb_data data;
3769 data.model = this;
3770 data.out = out;
3771 m_store.for_each_cluster (append_ssa_names_cb, &data);
757bf1df
DM
3772}
3773
808f4dfe 3774/* Implementation detail of get_ssa_name_regions_for_current_frame. */
757bf1df 3775
808f4dfe
DM
3776void
3777region_model::append_ssa_names_cb (const region *base_reg,
3778 append_ssa_names_cb_data *cb_data)
757bf1df 3779{
808f4dfe
DM
3780 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
3781 return;
3782 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
757bf1df 3783 {
808f4dfe
DM
3784 if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
3785 cb_data->out->safe_push (decl_reg);
757bf1df
DM
3786 }
3787}
3788
b9365b93
DM
3789/* Return a new region describing a heap-allocated block of memory.
3790 Use CTXT to complain about tainted sizes. */
757bf1df 3791
808f4dfe 3792const region *
b9365b93
DM
3793region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
3794 region_model_context *ctxt)
757bf1df 3795{
808f4dfe 3796 const region *reg = m_mgr->create_region_for_heap_alloc ();
ea4e3218 3797 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 3798 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 3799 return reg;
757bf1df
DM
3800}
3801
808f4dfe 3802/* Return a new region describing a block of memory allocated within the
b9365b93
DM
3803 current frame.
3804 Use CTXT to complain about tainted sizes. */
757bf1df 3805
808f4dfe 3806const region *
b9365b93
DM
3807region_model::create_region_for_alloca (const svalue *size_in_bytes,
3808 region_model_context *ctxt)
757bf1df 3809{
808f4dfe 3810 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
ea4e3218 3811 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 3812 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 3813 return reg;
757bf1df
DM
3814}
3815
b9365b93
DM
3816/* Record that the size of REG is SIZE_IN_BYTES.
3817 Use CTXT to complain about tainted sizes. */
757bf1df
DM
3818
3819void
9a2c9579 3820region_model::set_dynamic_extents (const region *reg,
b9365b93
DM
3821 const svalue *size_in_bytes,
3822 region_model_context *ctxt)
9a2c9579
DM
3823{
3824 assert_compat_types (size_in_bytes->get_type (), size_type_node);
b9365b93
DM
3825 if (ctxt)
3826 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
3827 ctxt);
9a2c9579
DM
3828 m_dynamic_extents.put (reg, size_in_bytes);
3829}
3830
3831/* Get the recording of REG in bytes, or NULL if no dynamic size was
3832 recorded. */
3833
3834const svalue *
3835region_model::get_dynamic_extents (const region *reg) const
757bf1df 3836{
9a2c9579
DM
3837 if (const svalue * const *slot = m_dynamic_extents.get (reg))
3838 return *slot;
3839 return NULL;
3840}
3841
3842/* Unset any recorded dynamic size of REG. */
3843
3844void
3845region_model::unset_dynamic_extents (const region *reg)
3846{
3847 m_dynamic_extents.remove (reg);
757bf1df
DM
3848}
3849
eafa9d96
DM
3850/* class noop_region_model_context : public region_model_context. */
3851
3852void
3853noop_region_model_context::bifurcate (custom_edge_info *info)
3854{
3855 delete info;
3856}
3857
3858void
3859noop_region_model_context::terminate_path ()
3860{
3861}
3862
808f4dfe 3863/* struct model_merger. */
757bf1df 3864
808f4dfe 3865/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
3866
3867void
808f4dfe 3868model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 3869{
808f4dfe
DM
3870 pp_string (pp, "model A:");
3871 pp_newline (pp);
3872 m_model_a->dump_to_pp (pp, simple, true);
3873 pp_newline (pp);
757bf1df 3874
808f4dfe 3875 pp_string (pp, "model B:");
757bf1df 3876 pp_newline (pp);
808f4dfe 3877 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
3878 pp_newline (pp);
3879
808f4dfe 3880 pp_string (pp, "merged model:");
757bf1df 3881 pp_newline (pp);
808f4dfe 3882 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
3883 pp_newline (pp);
3884}
3885
808f4dfe 3886/* Dump a multiline representation of this merger to FILE. */
757bf1df
DM
3887
3888void
808f4dfe 3889model_merger::dump (FILE *fp, bool simple) const
757bf1df
DM
3890{
3891 pretty_printer pp;
3892 pp_format_decoder (&pp) = default_tree_printer;
3893 pp_show_color (&pp) = pp_show_color (global_dc->printer);
3894 pp.buffer->stream = fp;
808f4dfe 3895 dump_to_pp (&pp, simple);
757bf1df
DM
3896 pp_flush (&pp);
3897}
3898
808f4dfe 3899/* Dump a multiline representation of this merger to stderr. */
757bf1df
DM
3900
3901DEBUG_FUNCTION void
808f4dfe 3902model_merger::dump (bool simple) const
757bf1df 3903{
808f4dfe 3904 dump (stderr, simple);
757bf1df
DM
3905}
3906
f573d351
DM
3907/* Return true if it's OK to merge SVAL with other svalues. */
3908
3909bool
3910model_merger::mergeable_svalue_p (const svalue *sval) const
3911{
3912 if (m_ext_state)
3913 {
3914 /* Reject merging svalues that have non-purgable sm-state,
3915 to avoid falsely reporting memory leaks by merging them
3916 with something else. For example, given a local var "p",
3917 reject the merger of a:
3918 store_a mapping "p" to a malloc-ed ptr
3919 with:
3920 store_b mapping "p" to a NULL ptr. */
3921 if (m_state_a)
3922 if (!m_state_a->can_purge_p (*m_ext_state, sval))
3923 return false;
3924 if (m_state_b)
3925 if (!m_state_b->can_purge_p (*m_ext_state, sval))
3926 return false;
3927 }
3928 return true;
3929}
3930
75038aa6
DM
3931} // namespace ana
3932
808f4dfe 3933/* Dump RMODEL fully to stderr (i.e. without summarization). */
757bf1df 3934
808f4dfe
DM
3935DEBUG_FUNCTION void
3936debug (const region_model &rmodel)
757bf1df 3937{
808f4dfe 3938 rmodel.dump (false);
757bf1df
DM
3939}
3940
8ca7fa84 3941/* class rejected_op_constraint : public rejected_constraint. */
84fb3546
DM
3942
3943void
8ca7fa84 3944rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
84fb3546
DM
3945{
3946 region_model m (m_model);
3947 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
3948 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
3949 lhs_sval->dump_to_pp (pp, true);
3950 pp_printf (pp, " %s ", op_symbol_code (m_op));
3951 rhs_sval->dump_to_pp (pp, true);
3952}
3953
8ca7fa84
DM
3954/* class rejected_ranges_constraint : public rejected_constraint. */
3955
3956void
3957rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
3958{
3959 region_model m (m_model);
3960 const svalue *sval = m.get_rvalue (m_expr, NULL);
3961 sval->dump_to_pp (pp, true);
3962 pp_string (pp, " in ");
3963 m_ranges->dump_to_pp (pp, true);
3964}
3965
808f4dfe 3966/* class engine. */
757bf1df 3967
11a2ff8d
DM
3968/* engine's ctor. */
3969
3970engine::engine (logger *logger)
3971: m_mgr (logger)
3972{
3973}
3974
808f4dfe 3975/* Dump the managed objects by class to LOGGER, and the per-class totals. */
757bf1df 3976
808f4dfe
DM
3977void
3978engine::log_stats (logger *logger) const
757bf1df 3979{
808f4dfe 3980 m_mgr.log_stats (logger, true);
757bf1df
DM
3981}
3982
75038aa6
DM
3983namespace ana {
3984
757bf1df
DM
3985#if CHECKING_P
3986
3987namespace selftest {
3988
8c08c983
DM
3989/* Build a constant tree of the given type from STR. */
3990
3991static tree
3992build_real_cst_from_string (tree type, const char *str)
3993{
3994 REAL_VALUE_TYPE real;
3995 real_from_string (&real, str);
3996 return build_real (type, real);
3997}
3998
3999/* Append various "interesting" constants to OUT (e.g. NaN). */
4000
4001static void
4002append_interesting_constants (auto_vec<tree> *out)
4003{
4004 out->safe_push (build_int_cst (integer_type_node, 0));
4005 out->safe_push (build_int_cst (integer_type_node, 42));
4006 out->safe_push (build_int_cst (unsigned_type_node, 0));
4007 out->safe_push (build_int_cst (unsigned_type_node, 42));
4008 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
4009 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
4010 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
4011 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
4012 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
4013 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
4014 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
4015 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
4016}
4017
4018/* Verify that tree_cmp is a well-behaved comparator for qsort, even
4019 if the underlying constants aren't comparable. */
4020
4021static void
4022test_tree_cmp_on_constants ()
4023{
4024 auto_vec<tree> csts;
4025 append_interesting_constants (&csts);
4026
4027 /* Try sorting every triple. */
4028 const unsigned num = csts.length ();
4029 for (unsigned i = 0; i < num; i++)
4030 for (unsigned j = 0; j < num; j++)
4031 for (unsigned k = 0; k < num; k++)
4032 {
4033 auto_vec<tree> v (3);
4034 v.quick_push (csts[i]);
4035 v.quick_push (csts[j]);
4036 v.quick_push (csts[k]);
4037 v.qsort (tree_cmp);
4038 }
4039}
4040
757bf1df
DM
4041/* Implementation detail of the ASSERT_CONDITION_* macros. */
4042
808f4dfe
DM
4043void
4044assert_condition (const location &loc,
4045 region_model &model,
4046 const svalue *lhs, tree_code op, const svalue *rhs,
4047 tristate expected)
4048{
4049 tristate actual = model.eval_condition (lhs, op, rhs);
4050 ASSERT_EQ_AT (loc, actual, expected);
4051}
4052
4053/* Implementation detail of the ASSERT_CONDITION_* macros. */
4054
757bf1df
DM
4055void
4056assert_condition (const location &loc,
4057 region_model &model,
4058 tree lhs, tree_code op, tree rhs,
4059 tristate expected)
4060{
4061 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
4062 ASSERT_EQ_AT (loc, actual, expected);
4063}
4064
90f7c300
DM
4065/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
4066
4067static void
4068assert_dump_tree_eq (const location &loc, tree t, const char *expected)
4069{
4070 auto_fix_quotes sentinel;
4071 pretty_printer pp;
4072 pp_format_decoder (&pp) = default_tree_printer;
4073 dump_tree (&pp, t);
4074 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
4075}
4076
4077/* Assert that dump_tree (T) is EXPECTED. */
4078
4079#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
4080 SELFTEST_BEGIN_STMT \
4081 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
4082 SELFTEST_END_STMT
4083
757bf1df
DM
4084/* Implementation detail of ASSERT_DUMP_EQ. */
4085
4086static void
4087assert_dump_eq (const location &loc,
4088 const region_model &model,
4089 bool summarize,
4090 const char *expected)
4091{
4092 auto_fix_quotes sentinel;
4093 pretty_printer pp;
4094 pp_format_decoder (&pp) = default_tree_printer;
808f4dfe
DM
4095
4096 model.dump_to_pp (&pp, summarize, true);
757bf1df
DM
4097 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
4098}
4099
4100/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
4101
4102#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
4103 SELFTEST_BEGIN_STMT \
4104 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
4105 SELFTEST_END_STMT
4106
4107/* Smoketest for region_model::dump_to_pp. */
4108
4109static void
4110test_dump ()
4111{
808f4dfe
DM
4112 region_model_manager mgr;
4113 region_model model (&mgr);
757bf1df
DM
4114
4115 ASSERT_DUMP_EQ (model, false,
808f4dfe
DM
4116 "stack depth: 0\n"
4117 "m_called_unknown_fn: FALSE\n"
4118 "constraint_manager:\n"
4119 " equiv classes:\n"
4120 " constraints:\n");
4121 ASSERT_DUMP_EQ (model, true,
4122 "stack depth: 0\n"
4123 "m_called_unknown_fn: FALSE\n"
4124 "constraint_manager:\n"
757bf1df
DM
4125 " equiv classes:\n"
4126 " constraints:\n");
757bf1df
DM
4127}
4128
884d9141
DM
4129/* Helper function for selftests. Create a struct or union type named NAME,
4130 with the fields given by the FIELD_DECLS in FIELDS.
4131 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
4132 create a UNION_TYPE. */
4133
4134static tree
4135make_test_compound_type (const char *name, bool is_struct,
4136 const auto_vec<tree> *fields)
4137{
4138 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
4139 TYPE_NAME (t) = get_identifier (name);
4140 TYPE_SIZE (t) = 0;
4141
4142 tree fieldlist = NULL;
4143 int i;
4144 tree field;
4145 FOR_EACH_VEC_ELT (*fields, i, field)
4146 {
4147 gcc_assert (TREE_CODE (field) == FIELD_DECL);
4148 DECL_CONTEXT (field) = t;
4149 fieldlist = chainon (field, fieldlist);
4150 }
4151 fieldlist = nreverse (fieldlist);
4152 TYPE_FIELDS (t) = fieldlist;
4153
4154 layout_type (t);
4155 return t;
4156}
4157
a96f1c38
DM
4158/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
4159
4160struct coord_test
4161{
4162 coord_test ()
4163 {
4164 auto_vec<tree> fields;
4165 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
4166 get_identifier ("x"), integer_type_node);
4167 fields.safe_push (m_x_field);
4168 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
4169 get_identifier ("y"), integer_type_node);
4170 fields.safe_push (m_y_field);
4171 m_coord_type = make_test_compound_type ("coord", true, &fields);
4172 }
4173
4174 tree m_x_field;
4175 tree m_y_field;
4176 tree m_coord_type;
4177};
4178
808f4dfe 4179/* Verify usage of a struct. */
884d9141
DM
4180
4181static void
808f4dfe 4182test_struct ()
884d9141 4183{
a96f1c38
DM
4184 coord_test ct;
4185
4186 tree c = build_global_decl ("c", ct.m_coord_type);
4187 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4188 c, ct.m_x_field, NULL_TREE);
4189 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4190 c, ct.m_y_field, NULL_TREE);
884d9141
DM
4191
4192 tree int_17 = build_int_cst (integer_type_node, 17);
4193 tree int_m3 = build_int_cst (integer_type_node, -3);
4194
808f4dfe
DM
4195 region_model_manager mgr;
4196 region_model model (&mgr);
884d9141
DM
4197 model.set_value (c_x, int_17, NULL);
4198 model.set_value (c_y, int_m3, NULL);
4199
808f4dfe
DM
4200 /* Verify get_offset for "c.x". */
4201 {
4202 const region *c_x_reg = model.get_lvalue (c_x, NULL);
4203 region_offset offset = c_x_reg->get_offset ();
4204 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4205 ASSERT_EQ (offset.get_bit_offset (), 0);
4206 }
4207
4208 /* Verify get_offset for "c.y". */
4209 {
4210 const region *c_y_reg = model.get_lvalue (c_y, NULL);
4211 region_offset offset = c_y_reg->get_offset ();
4212 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4213 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
4214 }
884d9141
DM
4215}
4216
808f4dfe 4217/* Verify usage of an array element. */
884d9141
DM
4218
4219static void
808f4dfe 4220test_array_1 ()
884d9141
DM
4221{
4222 tree tlen = size_int (10);
4223 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
4224
4225 tree a = build_global_decl ("a", arr_type);
4226
808f4dfe
DM
4227 region_model_manager mgr;
4228 region_model model (&mgr);
884d9141
DM
4229 tree int_0 = build_int_cst (integer_type_node, 0);
4230 tree a_0 = build4 (ARRAY_REF, char_type_node,
4231 a, int_0, NULL_TREE, NULL_TREE);
4232 tree char_A = build_int_cst (char_type_node, 'A');
4233 model.set_value (a_0, char_A, NULL);
884d9141
DM
4234}
4235
90f7c300
DM
4236/* Verify that region_model::get_representative_tree works as expected. */
4237
4238static void
4239test_get_representative_tree ()
4240{
808f4dfe
DM
4241 region_model_manager mgr;
4242
90f7c300
DM
4243 /* STRING_CST. */
4244 {
4245 tree string_cst = build_string (4, "foo");
808f4dfe
DM
4246 region_model m (&mgr);
4247 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
4248 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
4249 ASSERT_EQ (rep, string_cst);
4250 }
4251
4252 /* String literal. */
4253 {
4254 tree string_cst_ptr = build_string_literal (4, "foo");
808f4dfe
DM
4255 region_model m (&mgr);
4256 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
4257 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
4258 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
4259 }
808f4dfe
DM
4260
4261 /* Value of an element within an array. */
4262 {
4263 tree tlen = size_int (10);
4264 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
4265 tree a = build_global_decl ("a", arr_type);
4266 placeholder_svalue test_sval (char_type_node, "test value");
4267
4268 /* Value of a[3]. */
4269 {
4270 test_region_model_context ctxt;
4271 region_model model (&mgr);
4272 tree int_3 = build_int_cst (integer_type_node, 3);
4273 tree a_3 = build4 (ARRAY_REF, char_type_node,
4274 a, int_3, NULL_TREE, NULL_TREE);
4275 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
4276 model.set_value (a_3_reg, &test_sval, &ctxt);
4277 tree rep = model.get_representative_tree (&test_sval);
4278 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
4279 }
4280
4281 /* Value of a[0]. */
4282 {
4283 test_region_model_context ctxt;
4284 region_model model (&mgr);
4285 tree idx = build_int_cst (integer_type_node, 0);
4286 tree a_0 = build4 (ARRAY_REF, char_type_node,
4287 a, idx, NULL_TREE, NULL_TREE);
4288 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
4289 model.set_value (a_0_reg, &test_sval, &ctxt);
4290 tree rep = model.get_representative_tree (&test_sval);
4291 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
4292 }
4293 }
4294
4295 /* Value of a field within a struct. */
4296 {
4297 coord_test ct;
4298
4299 tree c = build_global_decl ("c", ct.m_coord_type);
4300 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4301 c, ct.m_x_field, NULL_TREE);
4302 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4303 c, ct.m_y_field, NULL_TREE);
4304
4305 test_region_model_context ctxt;
4306
4307 /* Value of initial field. */
4308 {
4309 region_model m (&mgr);
4310 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
4311 placeholder_svalue test_sval_x (integer_type_node, "test x val");
4312 m.set_value (c_x_reg, &test_sval_x, &ctxt);
4313 tree rep = m.get_representative_tree (&test_sval_x);
4314 ASSERT_DUMP_TREE_EQ (rep, "c.x");
4315 }
4316
4317 /* Value of non-initial field. */
4318 {
4319 region_model m (&mgr);
4320 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
4321 placeholder_svalue test_sval_y (integer_type_node, "test y val");
4322 m.set_value (c_y_reg, &test_sval_y, &ctxt);
4323 tree rep = m.get_representative_tree (&test_sval_y);
4324 ASSERT_DUMP_TREE_EQ (rep, "c.y");
4325 }
4326 }
90f7c300
DM
4327}
4328
757bf1df 4329/* Verify that calling region_model::get_rvalue repeatedly on the same
808f4dfe 4330 tree constant retrieves the same svalue *. */
757bf1df
DM
4331
4332static void
4333test_unique_constants ()
4334{
4335 tree int_0 = build_int_cst (integer_type_node, 0);
4336 tree int_42 = build_int_cst (integer_type_node, 42);
4337
4338 test_region_model_context ctxt;
808f4dfe
DM
4339 region_model_manager mgr;
4340 region_model model (&mgr);
757bf1df
DM
4341 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
4342 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
4343 model.get_rvalue (int_42, &ctxt));
4344 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
4345 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
757bf1df 4346
808f4dfe
DM
4347 /* A "(const int)42" will be a different tree from "(int)42)"... */
4348 tree const_int_type_node
4349 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
4350 tree const_int_42 = build_int_cst (const_int_type_node, 42);
4351 ASSERT_NE (int_42, const_int_42);
4352 /* It should have a different const_svalue. */
4353 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
4354 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
4355 ASSERT_NE (int_42_sval, const_int_42_sval);
4356 /* But they should compare as equal. */
4357 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
4358 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
757bf1df
DM
4359}
4360
808f4dfe
DM
4361/* Verify that each type gets its own singleton unknown_svalue within a
4362 region_model_manager, and that NULL_TREE gets its own singleton. */
757bf1df
DM
4363
4364static void
808f4dfe 4365test_unique_unknowns ()
757bf1df 4366{
808f4dfe
DM
4367 region_model_manager mgr;
4368 const svalue *unknown_int
4369 = mgr.get_or_create_unknown_svalue (integer_type_node);
4370 /* Repeated calls with the same type should get the same "unknown"
4371 svalue. */
4372 const svalue *unknown_int_2
4373 = mgr.get_or_create_unknown_svalue (integer_type_node);
4374 ASSERT_EQ (unknown_int, unknown_int_2);
757bf1df 4375
808f4dfe
DM
4376 /* Different types (or the NULL type) should have different
4377 unknown_svalues. */
4378 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
4379 ASSERT_NE (unknown_NULL_type, unknown_int);
757bf1df 4380
808f4dfe
DM
4381 /* Repeated calls with NULL for the type should get the same "unknown"
4382 svalue. */
4383 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
4384 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
757bf1df
DM
4385}
4386
808f4dfe 4387/* Verify that initial_svalue are handled as expected. */
757bf1df 4388
808f4dfe
DM
4389static void
4390test_initial_svalue_folding ()
757bf1df 4391{
808f4dfe
DM
4392 region_model_manager mgr;
4393 tree x = build_global_decl ("x", integer_type_node);
4394 tree y = build_global_decl ("y", integer_type_node);
757bf1df 4395
808f4dfe
DM
4396 test_region_model_context ctxt;
4397 region_model model (&mgr);
4398 const svalue *x_init = model.get_rvalue (x, &ctxt);
4399 const svalue *y_init = model.get_rvalue (y, &ctxt);
4400 ASSERT_NE (x_init, y_init);
4401 const region *x_reg = model.get_lvalue (x, &ctxt);
4402 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
757bf1df 4403
808f4dfe 4404}
757bf1df 4405
808f4dfe 4406/* Verify that unary ops are folded as expected. */
757bf1df
DM
4407
4408static void
808f4dfe 4409test_unaryop_svalue_folding ()
757bf1df 4410{
808f4dfe 4411 region_model_manager mgr;
757bf1df
DM
4412 tree x = build_global_decl ("x", integer_type_node);
4413 tree y = build_global_decl ("y", integer_type_node);
4414
808f4dfe
DM
4415 test_region_model_context ctxt;
4416 region_model model (&mgr);
4417 const svalue *x_init = model.get_rvalue (x, &ctxt);
4418 const svalue *y_init = model.get_rvalue (y, &ctxt);
4419 const region *x_reg = model.get_lvalue (x, &ctxt);
4420 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
4421
4422 /* "(int)x" -> "x". */
4423 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
4424
4425 /* "(void *)x" -> something other than "x". */
4426 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
4427
4428 /* "!(x == y)" -> "x != y". */
4429 ASSERT_EQ (mgr.get_or_create_unaryop
4430 (boolean_type_node, TRUTH_NOT_EXPR,
4431 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
4432 x_init, y_init)),
4433 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
4434 x_init, y_init));
4435 /* "!(x > y)" -> "x <= y". */
4436 ASSERT_EQ (mgr.get_or_create_unaryop
4437 (boolean_type_node, TRUTH_NOT_EXPR,
4438 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
4439 x_init, y_init)),
4440 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
4441 x_init, y_init));
4442}
4443
4444/* Verify that binops on constant svalues are folded. */
757bf1df 4445
808f4dfe
DM
4446static void
4447test_binop_svalue_folding ()
4448{
4449#define NUM_CSTS 10
4450 tree cst_int[NUM_CSTS];
4451 region_model_manager mgr;
4452 const svalue *cst_sval[NUM_CSTS];
4453 for (int i = 0; i < NUM_CSTS; i++)
4454 {
4455 cst_int[i] = build_int_cst (integer_type_node, i);
4456 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
4457 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
4458 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
4459 }
757bf1df 4460
808f4dfe
DM
4461 for (int i = 0; i < NUM_CSTS; i++)
4462 for (int j = 0; j < NUM_CSTS; j++)
4463 {
4464 if (i != j)
4465 ASSERT_NE (cst_sval[i], cst_sval[j]);
4466 if (i + j < NUM_CSTS)
4467 {
4468 const svalue *sum
4469 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4470 cst_sval[i], cst_sval[j]);
4471 ASSERT_EQ (sum, cst_sval[i + j]);
4472 }
4473 if (i - j >= 0)
4474 {
4475 const svalue *difference
4476 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
4477 cst_sval[i], cst_sval[j]);
4478 ASSERT_EQ (difference, cst_sval[i - j]);
4479 }
4480 if (i * j < NUM_CSTS)
4481 {
4482 const svalue *product
4483 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4484 cst_sval[i], cst_sval[j]);
4485 ASSERT_EQ (product, cst_sval[i * j]);
4486 }
4487 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
4488 cst_sval[i], cst_sval[j]);
4489 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
4490 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
4491 cst_sval[i], cst_sval[j]);
4492 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
4493 // etc
4494 }
757bf1df 4495
808f4dfe 4496 tree x = build_global_decl ("x", integer_type_node);
757bf1df 4497
808f4dfe
DM
4498 test_region_model_context ctxt;
4499 region_model model (&mgr);
4500 const svalue *x_init = model.get_rvalue (x, &ctxt);
4501
4502 /* PLUS_EXPR folding. */
4503 const svalue *x_init_plus_zero
4504 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4505 x_init, cst_sval[0]);
4506 ASSERT_EQ (x_init_plus_zero, x_init);
4507 const svalue *zero_plus_x_init
4508 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4509 cst_sval[0], x_init);
4510 ASSERT_EQ (zero_plus_x_init, x_init);
4511
4512 /* MULT_EXPR folding. */
4513 const svalue *x_init_times_zero
4514 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4515 x_init, cst_sval[0]);
4516 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
4517 const svalue *zero_times_x_init
4518 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4519 cst_sval[0], x_init);
4520 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
4521
4522 const svalue *x_init_times_one
4523 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4524 x_init, cst_sval[1]);
4525 ASSERT_EQ (x_init_times_one, x_init);
4526 const svalue *one_times_x_init
4527 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4528 cst_sval[1], x_init);
4529 ASSERT_EQ (one_times_x_init, x_init);
4530
4531 // etc
4532 // TODO: do we want to use the match-and-simplify DSL for this?
4533
4534 /* Verify that binops put any constants on the RHS. */
4535 const svalue *four_times_x_init
4536 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4537 cst_sval[4], x_init);
4538 const svalue *x_init_times_four
4539 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4540 x_init, cst_sval[4]);
4541 ASSERT_EQ (four_times_x_init, x_init_times_four);
4542 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
4543 ASSERT_EQ (binop->get_op (), MULT_EXPR);
4544 ASSERT_EQ (binop->get_arg0 (), x_init);
4545 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
4546
4547 /* Verify that ((x + 1) + 1) == (x + 2). */
4548 const svalue *x_init_plus_one
4549 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4550 x_init, cst_sval[1]);
4551 const svalue *x_init_plus_two
4552 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4553 x_init, cst_sval[2]);
4554 const svalue *x_init_plus_one_plus_one
4555 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4556 x_init_plus_one, cst_sval[1]);
4557 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
4f34f8cc
DM
4558
4559 /* Verify various binops on booleans. */
4560 {
4561 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
4562 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
4563 const svalue *sval_unknown
4564 = mgr.get_or_create_unknown_svalue (boolean_type_node);
4565 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
4566 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
4567 {
4568 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4569 sval_true, sval_unknown),
4570 sval_true);
4571 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4572 sval_false, sval_unknown),
4573 sval_unknown);
4574 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4575 sval_false, &sval_placeholder),
4576 &sval_placeholder);
4577 }
4578 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
4579 {
4580 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4581 sval_false, sval_unknown),
4582 sval_false);
4583 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4584 sval_true, sval_unknown),
4585 sval_unknown);
4586 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4587 sval_true, &sval_placeholder),
4588 &sval_placeholder);
4589 }
4590 }
808f4dfe
DM
4591}
4592
4593/* Verify that sub_svalues are folded as expected. */
757bf1df 4594
808f4dfe
DM
4595static void
4596test_sub_svalue_folding ()
4597{
4598 coord_test ct;
4599 tree c = build_global_decl ("c", ct.m_coord_type);
4600 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4601 c, ct.m_x_field, NULL_TREE);
757bf1df 4602
808f4dfe
DM
4603 region_model_manager mgr;
4604 region_model model (&mgr);
4605 test_region_model_context ctxt;
4606 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 4607
808f4dfe
DM
4608 /* Verify that sub_svalue of "unknown" simply
4609 yields an unknown. */
757bf1df 4610
808f4dfe
DM
4611 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
4612 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
4613 unknown, c_x_reg);
4614 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
4615 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
757bf1df
DM
4616}
4617
808f4dfe 4618/* Test that region::descendent_of_p works as expected. */
757bf1df
DM
4619
4620static void
808f4dfe 4621test_descendent_of_p ()
757bf1df 4622{
808f4dfe
DM
4623 region_model_manager mgr;
4624 const region *stack = mgr.get_stack_region ();
4625 const region *heap = mgr.get_heap_region ();
4626 const region *code = mgr.get_code_region ();
4627 const region *globals = mgr.get_globals_region ();
757bf1df 4628
808f4dfe
DM
4629 /* descendent_of_p should return true when used on the region itself. */
4630 ASSERT_TRUE (stack->descendent_of_p (stack));
4631 ASSERT_FALSE (stack->descendent_of_p (heap));
4632 ASSERT_FALSE (stack->descendent_of_p (code));
4633 ASSERT_FALSE (stack->descendent_of_p (globals));
757bf1df 4634
808f4dfe
DM
4635 tree x = build_global_decl ("x", integer_type_node);
4636 const region *x_reg = mgr.get_region_for_global (x);
4637 ASSERT_TRUE (x_reg->descendent_of_p (globals));
757bf1df 4638
808f4dfe
DM
4639 /* A cast_region should be a descendent of the original region. */
4640 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
4641 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
757bf1df
DM
4642}
4643
4644/* Verify that simple assignments work as expected. */
4645
4646static void
4647test_assignment ()
4648{
4649 tree int_0 = build_int_cst (integer_type_node, 0);
4650 tree x = build_global_decl ("x", integer_type_node);
4651 tree y = build_global_decl ("y", integer_type_node);
4652
4653 /* "x == 0", then use of y, then "y = 0;". */
808f4dfe
DM
4654 region_model_manager mgr;
4655 region_model model (&mgr);
757bf1df
DM
4656 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
4657 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
4658 model.set_value (model.get_lvalue (y, NULL),
4659 model.get_rvalue (int_0, NULL),
4660 NULL);
4661 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
4662 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
757bf1df
DM
4663}
4664
a96f1c38
DM
4665/* Verify that compound assignments work as expected. */
4666
4667static void
4668test_compound_assignment ()
4669{
4670 coord_test ct;
4671
4672 tree c = build_global_decl ("c", ct.m_coord_type);
4673 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4674 c, ct.m_x_field, NULL_TREE);
4675 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4676 c, ct.m_y_field, NULL_TREE);
4677 tree d = build_global_decl ("d", ct.m_coord_type);
4678 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4679 d, ct.m_x_field, NULL_TREE);
4680 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4681 d, ct.m_y_field, NULL_TREE);
4682
4683 tree int_17 = build_int_cst (integer_type_node, 17);
4684 tree int_m3 = build_int_cst (integer_type_node, -3);
4685
808f4dfe
DM
4686 region_model_manager mgr;
4687 region_model model (&mgr);
a96f1c38
DM
4688 model.set_value (c_x, int_17, NULL);
4689 model.set_value (c_y, int_m3, NULL);
4690
a96f1c38
DM
4691 /* Copy c to d. */
4692 model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL),
4693 NULL);
4694 /* Check that the fields have the same svalues. */
4695 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
4696 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
4697}
4698
757bf1df
DM
4699/* Verify the details of pushing and popping stack frames. */
4700
4701static void
4702test_stack_frames ()
4703{
4704 tree int_42 = build_int_cst (integer_type_node, 42);
4705 tree int_10 = build_int_cst (integer_type_node, 10);
4706 tree int_5 = build_int_cst (integer_type_node, 5);
4707 tree int_0 = build_int_cst (integer_type_node, 0);
4708
4709 auto_vec <tree> param_types;
4710 tree parent_fndecl = make_fndecl (integer_type_node,
4711 "parent_fn",
4712 param_types);
4713 allocate_struct_function (parent_fndecl, true);
4714
4715 tree child_fndecl = make_fndecl (integer_type_node,
4716 "child_fn",
4717 param_types);
4718 allocate_struct_function (child_fndecl, true);
4719
4720 /* "a" and "b" in the parent frame. */
4721 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4722 get_identifier ("a"),
4723 integer_type_node);
4724 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4725 get_identifier ("b"),
4726 integer_type_node);
4727 /* "x" and "y" in a child frame. */
4728 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4729 get_identifier ("x"),
4730 integer_type_node);
4731 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4732 get_identifier ("y"),
4733 integer_type_node);
4734
4735 /* "p" global. */
4736 tree p = build_global_decl ("p", ptr_type_node);
4737
4738 /* "q" global. */
4739 tree q = build_global_decl ("q", ptr_type_node);
4740
808f4dfe 4741 region_model_manager mgr;
757bf1df 4742 test_region_model_context ctxt;
808f4dfe 4743 region_model model (&mgr);
757bf1df
DM
4744
4745 /* Push stack frame for "parent_fn". */
808f4dfe
DM
4746 const region *parent_frame_reg
4747 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
4748 NULL, &ctxt);
4749 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
4750 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
4751 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
4752 model.set_value (a_in_parent_reg,
4753 model.get_rvalue (int_42, &ctxt),
4754 &ctxt);
4755 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
4756
757bf1df
DM
4757 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
4758 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4759 tristate (tristate::TS_TRUE));
4760
4761 /* Push stack frame for "child_fn". */
808f4dfe 4762 const region *child_frame_reg
757bf1df 4763 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
4764 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
4765 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
4766 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
4767 model.set_value (x_in_child_reg,
4768 model.get_rvalue (int_0, &ctxt),
4769 &ctxt);
4770 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
4771
757bf1df
DM
4772 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
4773 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
4774 tristate (tristate::TS_TRUE));
4775
4776 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
4777 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
4778 model.set_value (p_in_globals_reg,
4779 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 4780 &ctxt);
808f4dfe 4781 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
4782
4783 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
4784 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
4785 model.set_value (q_in_globals_reg,
4786 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
4787 &ctxt);
4788
808f4dfe
DM
4789 /* Test region::descendent_of_p. */
4790 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
4791 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
4792 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
4793
4794 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
4795 model.pop_frame (NULL, NULL, &ctxt);
4796 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
4797 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
4798
4799 /* Verify that p (which was pointing at the local "x" in the popped
4800 frame) has been poisoned. */
33255ad3 4801 const svalue *new_p_sval = model.get_rvalue (p, NULL);
757bf1df
DM
4802 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
4803 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
4804 POISON_KIND_POPPED_STACK);
4805
4806 /* Verify that q still points to p, in spite of the region
4807 renumbering. */
808f4dfe 4808 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 4809 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 4810 ASSERT_EQ (new_q_sval->maybe_get_region (),
757bf1df
DM
4811 model.get_lvalue (p, &ctxt));
4812
4813 /* Verify that top of stack has been updated. */
808f4dfe 4814 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
4815
4816 /* Verify locals in parent frame. */
4817 /* Verify "a" still has its value. */
808f4dfe 4818 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
4819 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
4820 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
4821 int_42);
4822 /* Verify "b" still has its constraint. */
4823 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4824 tristate (tristate::TS_TRUE));
4825}
4826
4827/* Verify that get_representative_path_var works as expected, that
808f4dfe 4828 we can map from regions to parms and back within a recursive call
757bf1df
DM
4829 stack. */
4830
4831static void
4832test_get_representative_path_var ()
4833{
4834 auto_vec <tree> param_types;
4835 tree fndecl = make_fndecl (integer_type_node,
4836 "factorial",
4837 param_types);
4838 allocate_struct_function (fndecl, true);
4839
4840 /* Parm "n". */
4841 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4842 get_identifier ("n"),
4843 integer_type_node);
4844
808f4dfe
DM
4845 region_model_manager mgr;
4846 test_region_model_context ctxt;
4847 region_model model (&mgr);
757bf1df
DM
4848
4849 /* Push 5 stack frames for "factorial", each with a param */
808f4dfe
DM
4850 auto_vec<const region *> parm_regs;
4851 auto_vec<const svalue *> parm_svals;
757bf1df
DM
4852 for (int depth = 0; depth < 5; depth++)
4853 {
808f4dfe
DM
4854 const region *frame_n_reg
4855 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
4856 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
4857 parm_regs.safe_push (parm_n_reg);
757bf1df 4858
808f4dfe
DM
4859 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
4860 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
4861 parm_svals.safe_push (sval_n);
757bf1df
DM
4862 }
4863
4864 /* Verify that we can recognize that the regions are the parms,
4865 at every depth. */
4866 for (int depth = 0; depth < 5; depth++)
4867 {
808f4dfe
DM
4868 {
4869 svalue_set visited;
4870 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
4871 &visited),
4872 path_var (n, depth + 1));
4873 }
757bf1df
DM
4874 /* ...and that we can lookup lvalues for locals for all frames,
4875 not just the top. */
4876 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
808f4dfe 4877 parm_regs[depth]);
757bf1df 4878 /* ...and that we can locate the svalues. */
808f4dfe
DM
4879 {
4880 svalue_set visited;
4881 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
4882 &visited),
4883 path_var (n, depth + 1));
4884 }
757bf1df
DM
4885 }
4886}
4887
808f4dfe 4888/* Ensure that region_model::operator== works as expected. */
757bf1df
DM
4889
4890static void
808f4dfe 4891test_equality_1 ()
757bf1df 4892{
808f4dfe
DM
4893 tree int_42 = build_int_cst (integer_type_node, 42);
4894 tree int_17 = build_int_cst (integer_type_node, 17);
757bf1df 4895
808f4dfe
DM
4896/* Verify that "empty" region_model instances are equal to each other. */
4897 region_model_manager mgr;
4898 region_model model0 (&mgr);
4899 region_model model1 (&mgr);
757bf1df 4900 ASSERT_EQ (model0, model1);
808f4dfe
DM
4901
4902 /* Verify that setting state in model1 makes the models non-equal. */
4903 tree x = build_global_decl ("x", integer_type_node);
4904 model0.set_value (x, int_42, NULL);
4905 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4906 ASSERT_NE (model0, model1);
4907
4908 /* Verify the copy-ctor. */
4909 region_model model2 (model0);
4910 ASSERT_EQ (model0, model2);
4911 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4912 ASSERT_NE (model1, model2);
4913
4914 /* Verify that models obtained from copy-ctor are independently editable
4915 w/o affecting the original model. */
4916 model2.set_value (x, int_17, NULL);
4917 ASSERT_NE (model0, model2);
4918 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
4919 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
757bf1df
DM
4920}
4921
4922/* Verify that region models for
4923 x = 42; y = 113;
4924 and
4925 y = 113; x = 42;
808f4dfe 4926 are equal. */
757bf1df
DM
4927
4928static void
4929test_canonicalization_2 ()
4930{
4931 tree int_42 = build_int_cst (integer_type_node, 42);
4932 tree int_113 = build_int_cst (integer_type_node, 113);
4933 tree x = build_global_decl ("x", integer_type_node);
4934 tree y = build_global_decl ("y", integer_type_node);
4935
808f4dfe
DM
4936 region_model_manager mgr;
4937 region_model model0 (&mgr);
757bf1df
DM
4938 model0.set_value (model0.get_lvalue (x, NULL),
4939 model0.get_rvalue (int_42, NULL),
4940 NULL);
4941 model0.set_value (model0.get_lvalue (y, NULL),
4942 model0.get_rvalue (int_113, NULL),
4943 NULL);
4944
808f4dfe 4945 region_model model1 (&mgr);
757bf1df
DM
4946 model1.set_value (model1.get_lvalue (y, NULL),
4947 model1.get_rvalue (int_113, NULL),
4948 NULL);
4949 model1.set_value (model1.get_lvalue (x, NULL),
4950 model1.get_rvalue (int_42, NULL),
4951 NULL);
4952
757bf1df
DM
4953 ASSERT_EQ (model0, model1);
4954}
4955
4956/* Verify that constraints for
4957 x > 3 && y > 42
4958 and
4959 y > 42 && x > 3
4960 are equal after canonicalization. */
4961
4962static void
4963test_canonicalization_3 ()
4964{
4965 tree int_3 = build_int_cst (integer_type_node, 3);
4966 tree int_42 = build_int_cst (integer_type_node, 42);
4967 tree x = build_global_decl ("x", integer_type_node);
4968 tree y = build_global_decl ("y", integer_type_node);
4969
808f4dfe
DM
4970 region_model_manager mgr;
4971 region_model model0 (&mgr);
757bf1df
DM
4972 model0.add_constraint (x, GT_EXPR, int_3, NULL);
4973 model0.add_constraint (y, GT_EXPR, int_42, NULL);
4974
808f4dfe 4975 region_model model1 (&mgr);
757bf1df
DM
4976 model1.add_constraint (y, GT_EXPR, int_42, NULL);
4977 model1.add_constraint (x, GT_EXPR, int_3, NULL);
4978
808f4dfe
DM
4979 model0.canonicalize ();
4980 model1.canonicalize ();
757bf1df
DM
4981 ASSERT_EQ (model0, model1);
4982}
4983
8c08c983
DM
4984/* Verify that we can canonicalize a model containing NaN and other real
4985 constants. */
4986
4987static void
4988test_canonicalization_4 ()
4989{
4990 auto_vec<tree> csts;
4991 append_interesting_constants (&csts);
4992
808f4dfe
DM
4993 region_model_manager mgr;
4994 region_model model (&mgr);
8c08c983 4995
3f207ab3 4996 for (tree cst : csts)
8c08c983
DM
4997 model.get_rvalue (cst, NULL);
4998
808f4dfe 4999 model.canonicalize ();
8c08c983
DM
5000}
5001
757bf1df
DM
5002/* Assert that if we have two region_model instances
5003 with values VAL_A and VAL_B for EXPR that they are
5004 mergable. Write the merged model to *OUT_MERGED_MODEL,
5005 and the merged svalue ptr to *OUT_MERGED_SVALUE.
5006 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
5007 for that region_model. */
5008
5009static void
5010assert_region_models_merge (tree expr, tree val_a, tree val_b,
808f4dfe
DM
5011 region_model *out_merged_model,
5012 const svalue **out_merged_svalue)
757bf1df 5013{
808f4dfe 5014 program_point point (program_point::origin ());
757bf1df 5015 test_region_model_context ctxt;
808f4dfe
DM
5016 region_model_manager *mgr = out_merged_model->get_manager ();
5017 region_model model0 (mgr);
5018 region_model model1 (mgr);
757bf1df
DM
5019 if (val_a)
5020 model0.set_value (model0.get_lvalue (expr, &ctxt),
5021 model0.get_rvalue (val_a, &ctxt),
5022 &ctxt);
5023 if (val_b)
5024 model1.set_value (model1.get_lvalue (expr, &ctxt),
5025 model1.get_rvalue (val_b, &ctxt),
5026 &ctxt);
5027
5028 /* They should be mergeable. */
808f4dfe
DM
5029 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
5030 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
757bf1df
DM
5031}
5032
5033/* Verify that we can merge region_model instances. */
5034
5035static void
5036test_state_merging ()
5037{
5038 tree int_42 = build_int_cst (integer_type_node, 42);
5039 tree int_113 = build_int_cst (integer_type_node, 113);
5040 tree x = build_global_decl ("x", integer_type_node);
5041 tree y = build_global_decl ("y", integer_type_node);
5042 tree z = build_global_decl ("z", integer_type_node);
5043 tree p = build_global_decl ("p", ptr_type_node);
5044
5045 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
5046 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
5047
5048 auto_vec <tree> param_types;
5049 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
5050 allocate_struct_function (test_fndecl, true);
5051
5052 /* Param "a". */
5053 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5054 get_identifier ("a"),
5055 integer_type_node);
5056 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
5057
455f58ec
DM
5058 /* Param "q", a pointer. */
5059 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5060 get_identifier ("q"),
5061 ptr_type_node);
5062
808f4dfe
DM
5063 program_point point (program_point::origin ());
5064 region_model_manager mgr;
5065
757bf1df 5066 {
808f4dfe
DM
5067 region_model model0 (&mgr);
5068 region_model model1 (&mgr);
5069 region_model merged (&mgr);
757bf1df 5070 /* Verify empty models can be merged. */
808f4dfe 5071 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5072 ASSERT_EQ (model0, merged);
5073 }
5074
5075 /* Verify that we can merge two contradictory constraints on the
5076 value for a global. */
5077 /* TODO: verify that the merged model doesn't have a value for
5078 the global */
5079 {
808f4dfe
DM
5080 region_model model0 (&mgr);
5081 region_model model1 (&mgr);
5082 region_model merged (&mgr);
757bf1df
DM
5083 test_region_model_context ctxt;
5084 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5085 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe 5086 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5087 ASSERT_NE (model0, merged);
5088 ASSERT_NE (model1, merged);
5089 }
5090
5091 /* Verify handling of a PARM_DECL. */
5092 {
5093 test_region_model_context ctxt;
808f4dfe
DM
5094 region_model model0 (&mgr);
5095 region_model model1 (&mgr);
757bf1df
DM
5096 ASSERT_EQ (model0.get_stack_depth (), 0);
5097 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5098 ASSERT_EQ (model0.get_stack_depth (), 1);
757bf1df
DM
5099 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5100
808f4dfe
DM
5101 placeholder_svalue test_sval (integer_type_node, "test sval");
5102 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
5103 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
757bf1df
DM
5104 ASSERT_EQ (model0, model1);
5105
757bf1df 5106 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5107 region_model merged (&mgr);
5108 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5109 ASSERT_EQ (model0, merged);
808f4dfe
DM
5110 /* In particular, "a" should have the placeholder value. */
5111 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
757bf1df
DM
5112 }
5113
5114 /* Verify handling of a global. */
5115 {
5116 test_region_model_context ctxt;
808f4dfe
DM
5117 region_model model0 (&mgr);
5118 region_model model1 (&mgr);
757bf1df 5119
808f4dfe
DM
5120 placeholder_svalue test_sval (integer_type_node, "test sval");
5121 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5122 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5123 ASSERT_EQ (model0, model1);
757bf1df
DM
5124
5125 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5126 region_model merged (&mgr);
5127 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5128 ASSERT_EQ (model0, merged);
808f4dfe
DM
5129 /* In particular, "x" should have the placeholder value. */
5130 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
757bf1df
DM
5131 }
5132
5133 /* Use global-handling to verify various combinations of values. */
5134
5135 /* Two equal constant values. */
5136 {
808f4dfe
DM
5137 region_model merged (&mgr);
5138 const svalue *merged_x_sval;
757bf1df
DM
5139 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
5140
5141 /* In particular, there should be a constant value for "x". */
5142 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
5143 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
5144 int_42);
5145 }
5146
5147 /* Two non-equal constant values. */
5148 {
808f4dfe
DM
5149 region_model merged (&mgr);
5150 const svalue *merged_x_sval;
757bf1df
DM
5151 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
5152
808f4dfe
DM
5153 /* In particular, there should be a "widening" value for "x". */
5154 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
757bf1df
DM
5155 }
5156
808f4dfe 5157 /* Initial and constant. */
757bf1df 5158 {
808f4dfe
DM
5159 region_model merged (&mgr);
5160 const svalue *merged_x_sval;
757bf1df
DM
5161 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
5162
5163 /* In particular, there should be an unknown value for "x". */
5164 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5165 }
5166
808f4dfe 5167 /* Constant and initial. */
757bf1df 5168 {
808f4dfe
DM
5169 region_model merged (&mgr);
5170 const svalue *merged_x_sval;
757bf1df
DM
5171 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
5172
5173 /* In particular, there should be an unknown value for "x". */
5174 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5175 }
5176
5177 /* Unknown and constant. */
5178 // TODO
5179
5180 /* Pointers: NULL and NULL. */
5181 // TODO
5182
5183 /* Pointers: NULL and non-NULL. */
5184 // TODO
5185
5186 /* Pointers: non-NULL and non-NULL: ptr to a local. */
5187 {
808f4dfe 5188 region_model model0 (&mgr);
757bf1df 5189 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
757bf1df
DM
5190 model0.set_value (model0.get_lvalue (p, NULL),
5191 model0.get_rvalue (addr_of_a, NULL), NULL);
5192
5193 region_model model1 (model0);
5194 ASSERT_EQ (model0, model1);
5195
5196 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5197 region_model merged (&mgr);
5198 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5199 ASSERT_EQ (model0, merged);
5200 }
5201
5202 /* Pointers: non-NULL and non-NULL: ptr to a global. */
5203 {
808f4dfe 5204 region_model merged (&mgr);
757bf1df 5205 /* p == &y in both input models. */
808f4dfe 5206 const svalue *merged_p_sval;
757bf1df
DM
5207 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
5208 &merged_p_sval);
5209
5210 /* We should get p == &y in the merged model. */
5211 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
808f4dfe
DM
5212 const region_svalue *merged_p_ptr
5213 = merged_p_sval->dyn_cast_region_svalue ();
5214 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
5215 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
757bf1df
DM
5216 }
5217
5218 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
5219 {
808f4dfe
DM
5220 region_model merged (&mgr);
5221 /* x == &y vs x == &z in the input models; these are actually casts
5222 of the ptrs to "int". */
5223 const svalue *merged_x_sval;
5224 // TODO:
757bf1df
DM
5225 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
5226 &merged_x_sval);
5227
5228 /* We should get x == unknown in the merged model. */
5229 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5230 }
5231
5232 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
5233 {
5234 test_region_model_context ctxt;
808f4dfe 5235 region_model model0 (&mgr);
9a2c9579 5236 tree size = build_int_cst (size_type_node, 1024);
808f4dfe 5237 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
b9365b93
DM
5238 const region *new_reg
5239 = model0.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe 5240 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
757bf1df 5241 model0.set_value (model0.get_lvalue (p, &ctxt),
808f4dfe 5242 ptr_sval, &ctxt);
757bf1df
DM
5243
5244 region_model model1 (model0);
5245
5246 ASSERT_EQ (model0, model1);
5247
808f4dfe
DM
5248 region_model merged (&mgr);
5249 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5250
808f4dfe 5251 /* The merged model ought to be identical. */
757bf1df
DM
5252 ASSERT_EQ (model0, merged);
5253 }
5254
808f4dfe
DM
5255 /* Two regions sharing the same placeholder svalue should continue sharing
5256 it after self-merger. */
757bf1df
DM
5257 {
5258 test_region_model_context ctxt;
808f4dfe
DM
5259 region_model model0 (&mgr);
5260 placeholder_svalue placeholder_sval (integer_type_node, "test");
5261 model0.set_value (model0.get_lvalue (x, &ctxt),
5262 &placeholder_sval, &ctxt);
5263 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
757bf1df
DM
5264 region_model model1 (model0);
5265
5266 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5267 region_model merged (&mgr);
5268 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5269 ASSERT_EQ (model0, merged);
5270
5271 /* In particular, we should have x == y. */
5272 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
5273 tristate (tristate::TS_TRUE));
5274 }
5275
757bf1df 5276 {
808f4dfe
DM
5277 region_model model0 (&mgr);
5278 region_model model1 (&mgr);
757bf1df
DM
5279 test_region_model_context ctxt;
5280 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5281 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
808f4dfe
DM
5282 region_model merged (&mgr);
5283 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5284 }
5285
5286 {
808f4dfe
DM
5287 region_model model0 (&mgr);
5288 region_model model1 (&mgr);
757bf1df
DM
5289 test_region_model_context ctxt;
5290 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5291 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5292 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe
DM
5293 region_model merged (&mgr);
5294 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5295 }
757bf1df
DM
5296
5297 // TODO: what can't we merge? need at least one such test
5298
5299 /* TODO: various things
5300 - heap regions
5301 - value merging:
5302 - every combination, but in particular
808f4dfe 5303 - pairs of regions
757bf1df
DM
5304 */
5305
5306 /* Views. */
5307 {
5308 test_region_model_context ctxt;
808f4dfe 5309 region_model model0 (&mgr);
757bf1df 5310
808f4dfe
DM
5311 const region *x_reg = model0.get_lvalue (x, &ctxt);
5312 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
757bf1df
DM
5313 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
5314
5315 region_model model1 (model0);
5316 ASSERT_EQ (model1, model0);
5317
5318 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5319 region_model merged (&mgr);
5320 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5321 }
455f58ec
DM
5322
5323 /* Verify that we can merge a model in which a local in an older stack
5324 frame points to a local in a more recent stack frame. */
5325 {
808f4dfe 5326 region_model model0 (&mgr);
455f58ec 5327 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
808f4dfe 5328 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
455f58ec
DM
5329
5330 /* Push a second frame. */
808f4dfe 5331 const region *reg_2nd_frame
455f58ec
DM
5332 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5333
5334 /* Have a pointer in the older frame point to a local in the
5335 more recent frame. */
808f4dfe
DM
5336 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
5337 model0.set_value (q_in_first_frame, sval_ptr, NULL);
455f58ec
DM
5338
5339 /* Verify that it's pointing at the newer frame. */
5932dd35 5340 const region *reg_pointee = sval_ptr->maybe_get_region ();
808f4dfe 5341 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
455f58ec 5342
808f4dfe 5343 model0.canonicalize ();
455f58ec
DM
5344
5345 region_model model1 (model0);
5346 ASSERT_EQ (model0, model1);
5347
5348 /* They should be mergeable, and the result should be the same
5349 (after canonicalization, at least). */
808f4dfe
DM
5350 region_model merged (&mgr);
5351 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5352 merged.canonicalize ();
455f58ec
DM
5353 ASSERT_EQ (model0, merged);
5354 }
5355
5356 /* Verify that we can merge a model in which a local points to a global. */
5357 {
808f4dfe 5358 region_model model0 (&mgr);
455f58ec
DM
5359 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5360 model0.set_value (model0.get_lvalue (q, NULL),
5361 model0.get_rvalue (addr_of_y, NULL), NULL);
5362
455f58ec
DM
5363 region_model model1 (model0);
5364 ASSERT_EQ (model0, model1);
5365
5366 /* They should be mergeable, and the result should be the same
5367 (after canonicalization, at least). */
808f4dfe
DM
5368 region_model merged (&mgr);
5369 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
455f58ec
DM
5370 ASSERT_EQ (model0, merged);
5371 }
757bf1df
DM
5372}
5373
5374/* Verify that constraints are correctly merged when merging region_model
5375 instances. */
5376
5377static void
5378test_constraint_merging ()
5379{
5380 tree int_0 = build_int_cst (integer_type_node, 0);
5381 tree int_5 = build_int_cst (integer_type_node, 5);
5382 tree x = build_global_decl ("x", integer_type_node);
5383 tree y = build_global_decl ("y", integer_type_node);
5384 tree z = build_global_decl ("z", integer_type_node);
5385 tree n = build_global_decl ("n", integer_type_node);
5386
808f4dfe 5387 region_model_manager mgr;
757bf1df
DM
5388 test_region_model_context ctxt;
5389
5390 /* model0: 0 <= (x == y) < n. */
808f4dfe 5391 region_model model0 (&mgr);
757bf1df
DM
5392 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
5393 model0.add_constraint (x, GE_EXPR, int_0, NULL);
5394 model0.add_constraint (x, LT_EXPR, n, NULL);
5395
5396 /* model1: z != 5 && (0 <= x < n). */
808f4dfe 5397 region_model model1 (&mgr);
757bf1df
DM
5398 model1.add_constraint (z, NE_EXPR, int_5, NULL);
5399 model1.add_constraint (x, GE_EXPR, int_0, NULL);
5400 model1.add_constraint (x, LT_EXPR, n, NULL);
5401
5402 /* They should be mergeable; the merged constraints should
5403 be: (0 <= x < n). */
808f4dfe
DM
5404 program_point point (program_point::origin ());
5405 region_model merged (&mgr);
5406 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5407
5408 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
5409 tristate (tristate::TS_TRUE));
5410 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
5411 tristate (tristate::TS_TRUE));
5412
5413 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
5414 tristate (tristate::TS_UNKNOWN));
5415 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
5416 tristate (tristate::TS_UNKNOWN));
5417}
5418
808f4dfe
DM
5419/* Verify that widening_svalue::eval_condition_without_cm works as
5420 expected. */
5421
5422static void
5423test_widening_constraints ()
5424{
5425 program_point point (program_point::origin ());
5426 tree int_0 = build_int_cst (integer_type_node, 0);
5427 tree int_m1 = build_int_cst (integer_type_node, -1);
5428 tree int_1 = build_int_cst (integer_type_node, 1);
5429 tree int_256 = build_int_cst (integer_type_node, 256);
5430 region_model_manager mgr;
5431 test_region_model_context ctxt;
5432 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
5433 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
5434 const svalue *w_zero_then_one_sval
5435 = mgr.get_or_create_widening_svalue (integer_type_node, point,
5436 int_0_sval, int_1_sval);
5437 const widening_svalue *w_zero_then_one
5438 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
5439 ASSERT_EQ (w_zero_then_one->get_direction (),
5440 widening_svalue::DIR_ASCENDING);
5441 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
5442 tristate::TS_FALSE);
5443 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
5444 tristate::TS_FALSE);
5445 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
5446 tristate::TS_UNKNOWN);
5447 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
5448 tristate::TS_UNKNOWN);
5449
5450 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
5451 tristate::TS_FALSE);
5452 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
5453 tristate::TS_UNKNOWN);
5454 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
5455 tristate::TS_UNKNOWN);
5456 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
5457 tristate::TS_UNKNOWN);
5458
5459 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
5460 tristate::TS_TRUE);
5461 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
5462 tristate::TS_UNKNOWN);
5463 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
5464 tristate::TS_UNKNOWN);
5465 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
5466 tristate::TS_UNKNOWN);
5467
5468 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
5469 tristate::TS_TRUE);
5470 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
5471 tristate::TS_TRUE);
5472 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
5473 tristate::TS_UNKNOWN);
5474 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
5475 tristate::TS_UNKNOWN);
5476
5477 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
5478 tristate::TS_FALSE);
5479 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
5480 tristate::TS_UNKNOWN);
5481 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
5482 tristate::TS_UNKNOWN);
5483 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
5484 tristate::TS_UNKNOWN);
5485
5486 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
5487 tristate::TS_TRUE);
5488 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
5489 tristate::TS_UNKNOWN);
5490 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
5491 tristate::TS_UNKNOWN);
5492 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
5493 tristate::TS_UNKNOWN);
5494}
5495
5496/* Verify merging constraints for states simulating successive iterations
5497 of a loop.
5498 Simulate:
5499 for (i = 0; i < 256; i++)
5500 [...body...]
5501 i.e. this gimple:.
5502 i_15 = 0;
5503 goto <bb 4>;
5504
5505 <bb 4> :
5506 i_11 = PHI <i_15(2), i_23(3)>
5507 if (i_11 <= 255)
5508 goto <bb 3>;
5509 else
5510 goto [AFTER LOOP]
5511
5512 <bb 3> :
5513 [LOOP BODY]
5514 i_23 = i_11 + 1;
5515
5516 and thus these ops (and resultant states):
5517 i_11 = PHI()
5518 {i_11: 0}
5519 add_constraint (i_11 <= 255) [for the true edge]
5520 {i_11: 0} [constraint was a no-op]
5521 i_23 = i_11 + 1;
5522 {i_22: 1}
5523 i_11 = PHI()
5524 {i_11: WIDENED (at phi, 0, 1)}
5525 add_constraint (i_11 <= 255) [for the true edge]
5526 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
5527 i_23 = i_11 + 1;
5528 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
5529 i_11 = PHI(); merge with state at phi above
5530 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
5531 [changing meaning of "WIDENED" here]
5532 if (i_11 <= 255)
5533 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
5534 F: {i_11: 256}
5535 */
5536
5537static void
5538test_iteration_1 ()
5539{
5540 program_point point (program_point::origin ());
5541
5542 tree int_0 = build_int_cst (integer_type_node, 0);
5543 tree int_1 = build_int_cst (integer_type_node, 1);
5544 tree int_256 = build_int_cst (integer_type_node, 256);
5545 tree int_257 = build_int_cst (integer_type_node, 257);
5546 tree i = build_global_decl ("i", integer_type_node);
5547
5548 region_model_manager mgr;
5549 test_region_model_context ctxt;
5550
5551 /* model0: i: 0. */
5552 region_model model0 (&mgr);
5553 model0.set_value (i, int_0, &ctxt);
5554
5555 /* model1: i: 1. */
5556 region_model model1 (&mgr);
5557 model1.set_value (i, int_1, &ctxt);
5558
5559 /* Should merge "i" to a widened value. */
5560 region_model model2 (&mgr);
5561 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
5562 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
5563 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
5564 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
5565 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
5566
5567 /* Add constraint: i < 256 */
5568 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
5569 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
5570 tristate (tristate::TS_TRUE));
5571 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
5572 tristate (tristate::TS_TRUE));
5573
5574 /* Try merging with the initial state. */
5575 region_model model3 (&mgr);
5576 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
5577 /* Merging the merged value with the initial value should be idempotent,
5578 so that the analysis converges. */
5579 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
5580 /* Merger of 0 and a widening value with constraint < CST
5581 should retain the constraint, even though it was implicit
5582 for the 0 case. */
5583 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
5584 tristate (tristate::TS_TRUE));
5585 /* ...and we should have equality: the analysis should have converged. */
5586 ASSERT_EQ (model3, model2);
5587
5588 /* "i_23 = i_11 + 1;" */
5589 region_model model4 (model3);
5590 ASSERT_EQ (model4, model2);
5591 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
5592 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
5593 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
5594
5595 /* Try merging with the "i: 1" state. */
5596 region_model model5 (&mgr);
5597 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
5598 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
5599 ASSERT_EQ (model5, model4);
5600
5601 /* "i_11 = PHI();" merge with state at phi above.
5602 For i, we should have a merger of WIDENING with WIDENING + 1,
5603 and this should be WIDENING again. */
5604 region_model model6 (&mgr);
5605 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
5606 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
5607 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
5608
5609 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
5610}
5611
6969ac30
DM
5612/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
5613 all cast pointers to that region are also known to be non-NULL. */
5614
5615static void
5616test_malloc_constraints ()
5617{
808f4dfe
DM
5618 region_model_manager mgr;
5619 region_model model (&mgr);
6969ac30
DM
5620 tree p = build_global_decl ("p", ptr_type_node);
5621 tree char_star = build_pointer_type (char_type_node);
5622 tree q = build_global_decl ("q", char_star);
5623 tree null_ptr = build_int_cst (ptr_type_node, 0);
5624
808f4dfe 5625 const svalue *size_in_bytes
9a2c9579 5626 = mgr.get_or_create_unknown_svalue (size_type_node);
b9365b93 5627 const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
808f4dfe
DM
5628 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
5629 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6969ac30
DM
5630 model.set_value (q, p, NULL);
5631
6969ac30
DM
5632 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
5633 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
5634 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
5635 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
5636
5637 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
5638
6969ac30
DM
5639 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
5640 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
5641 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
5642 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
5643}
5644
808f4dfe
DM
5645/* Smoketest of getting and setting the value of a variable. */
5646
5647static void
5648test_var ()
5649{
5650 /* "int i;" */
5651 tree i = build_global_decl ("i", integer_type_node);
5652
5653 tree int_17 = build_int_cst (integer_type_node, 17);
5654 tree int_m3 = build_int_cst (integer_type_node, -3);
5655
5656 region_model_manager mgr;
5657 region_model model (&mgr);
5658
5659 const region *i_reg = model.get_lvalue (i, NULL);
5660 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
5661
5662 /* Reading "i" should give a symbolic "initial value". */
5663 const svalue *sval_init = model.get_rvalue (i, NULL);
5664 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
5665 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
5666 /* ..and doing it again should give the same "initial value". */
5667 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
5668
5669 /* "i = 17;". */
5670 model.set_value (i, int_17, NULL);
5671 ASSERT_EQ (model.get_rvalue (i, NULL),
5672 model.get_rvalue (int_17, NULL));
5673
5674 /* "i = -3;". */
5675 model.set_value (i, int_m3, NULL);
5676 ASSERT_EQ (model.get_rvalue (i, NULL),
5677 model.get_rvalue (int_m3, NULL));
5678
5679 /* Verify get_offset for "i". */
5680 {
5681 region_offset offset = i_reg->get_offset ();
5682 ASSERT_EQ (offset.get_base_region (), i_reg);
5683 ASSERT_EQ (offset.get_bit_offset (), 0);
5684 }
5685}
5686
5687static void
5688test_array_2 ()
5689{
5690 /* "int arr[10];" */
5691 tree tlen = size_int (10);
5692 tree arr_type
5693 = build_array_type (integer_type_node, build_index_type (tlen));
5694 tree arr = build_global_decl ("arr", arr_type);
5695
5696 /* "int i;" */
5697 tree i = build_global_decl ("i", integer_type_node);
5698
5699 tree int_0 = build_int_cst (integer_type_node, 0);
5700 tree int_1 = build_int_cst (integer_type_node, 1);
5701
5702 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
5703 arr, int_0, NULL_TREE, NULL_TREE);
5704 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
5705 arr, int_1, NULL_TREE, NULL_TREE);
5706 tree arr_i = build4 (ARRAY_REF, integer_type_node,
5707 arr, i, NULL_TREE, NULL_TREE);
5708
5709 tree int_17 = build_int_cst (integer_type_node, 17);
5710 tree int_42 = build_int_cst (integer_type_node, 42);
5711 tree int_m3 = build_int_cst (integer_type_node, -3);
5712
5713 region_model_manager mgr;
5714 region_model model (&mgr);
5715 /* "arr[0] = 17;". */
5716 model.set_value (arr_0, int_17, NULL);
5717 /* "arr[1] = -3;". */
5718 model.set_value (arr_1, int_m3, NULL);
5719
5720 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
5721 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
5722
5723 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
5724 model.set_value (arr_1, int_42, NULL);
5725 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
5726
5727 /* Verify get_offset for "arr[0]". */
5728 {
5729 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
5730 region_offset offset = arr_0_reg->get_offset ();
5731 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5732 ASSERT_EQ (offset.get_bit_offset (), 0);
5733 }
5734
5735 /* Verify get_offset for "arr[1]". */
5736 {
5737 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
5738 region_offset offset = arr_1_reg->get_offset ();
5739 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5740 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
5741 }
5742
5743 /* "arr[i] = i;" - this should remove the earlier bindings. */
5744 model.set_value (arr_i, i, NULL);
5745 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
5746 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
5747
5748 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
5749 model.set_value (arr_0, int_17, NULL);
5750 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
5751 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
5752}
5753
5754/* Smoketest of dereferencing a pointer via MEM_REF. */
5755
5756static void
5757test_mem_ref ()
5758{
5759 /*
5760 x = 17;
5761 p = &x;
5762 *p;
5763 */
5764 tree x = build_global_decl ("x", integer_type_node);
5765 tree int_star = build_pointer_type (integer_type_node);
5766 tree p = build_global_decl ("p", int_star);
5767
5768 tree int_17 = build_int_cst (integer_type_node, 17);
5769 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
5770 tree offset_0 = build_int_cst (integer_type_node, 0);
5771 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
5772
5773 region_model_manager mgr;
5774 region_model model (&mgr);
5775
5776 /* "x = 17;". */
5777 model.set_value (x, int_17, NULL);
5778
5779 /* "p = &x;". */
5780 model.set_value (p, addr_of_x, NULL);
5781
5782 const svalue *sval = model.get_rvalue (star_p, NULL);
5783 ASSERT_EQ (sval->maybe_get_constant (), int_17);
5784}
5785
5786/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
5787 Analogous to this code:
5788 void test_6 (int a[10])
5789 {
5790 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5791 a[3] = 42;
5792 __analyzer_eval (a[3] == 42); [should be TRUE]
5793 }
5794 from data-model-1.c, which looks like this at the gimple level:
5795 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5796 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
5797 int _2 = *_1; # MEM_REF
5798 _Bool _3 = _2 == 42;
5799 int _4 = (int) _3;
5800 __analyzer_eval (_4);
5801
5802 # a[3] = 42;
5803 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
5804 *_5 = 42; # MEM_REF
5805
5806 # __analyzer_eval (a[3] == 42); [should be TRUE]
5807 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
5808 int _7 = *_6; # MEM_REF
5809 _Bool _8 = _7 == 42;
5810 int _9 = (int) _8;
5811 __analyzer_eval (_9); */
5812
5813static void
5814test_POINTER_PLUS_EXPR_then_MEM_REF ()
5815{
5816 tree int_star = build_pointer_type (integer_type_node);
5817 tree a = build_global_decl ("a", int_star);
5818 tree offset_12 = build_int_cst (size_type_node, 12);
5819 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
5820 tree offset_0 = build_int_cst (integer_type_node, 0);
5821 tree mem_ref = build2 (MEM_REF, integer_type_node,
5822 pointer_plus_expr, offset_0);
5823 region_model_manager mgr;
5824 region_model m (&mgr);
5825
5826 tree int_42 = build_int_cst (integer_type_node, 42);
5827 m.set_value (mem_ref, int_42, NULL);
5828 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
5829}
5830
5831/* Verify that malloc works. */
5832
5833static void
5834test_malloc ()
5835{
5836 tree int_star = build_pointer_type (integer_type_node);
5837 tree p = build_global_decl ("p", int_star);
5838 tree n = build_global_decl ("n", integer_type_node);
5839 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5840 n, build_int_cst (size_type_node, 4));
5841
5842 region_model_manager mgr;
5843 test_region_model_context ctxt;
5844 region_model model (&mgr);
5845
5846 /* "p = malloc (n * 4);". */
5847 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 5848 const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe
DM
5849 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5850 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 5851 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
5852}
5853
5854/* Verify that alloca works. */
5855
5856static void
5857test_alloca ()
5858{
5859 auto_vec <tree> param_types;
5860 tree fndecl = make_fndecl (integer_type_node,
5861 "test_fn",
5862 param_types);
5863 allocate_struct_function (fndecl, true);
5864
5865
5866 tree int_star = build_pointer_type (integer_type_node);
5867 tree p = build_global_decl ("p", int_star);
5868 tree n = build_global_decl ("n", integer_type_node);
5869 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5870 n, build_int_cst (size_type_node, 4));
5871
5872 region_model_manager mgr;
5873 test_region_model_context ctxt;
5874 region_model model (&mgr);
5875
5876 /* Push stack frame. */
5877 const region *frame_reg
5878 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
5879 NULL, &ctxt);
5880 /* "p = alloca (n * 4);". */
5881 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 5882 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
808f4dfe
DM
5883 ASSERT_EQ (reg->get_parent_region (), frame_reg);
5884 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5885 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 5886 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
5887
5888 /* Verify that the pointers to the alloca region are replaced by
5889 poisoned values when the frame is popped. */
5890 model.pop_frame (NULL, NULL, &ctxt);
33255ad3 5891 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
808f4dfe
DM
5892}
5893
71fc4655
DM
5894/* Verify that svalue::involves_p works. */
5895
5896static void
5897test_involves_p ()
5898{
5899 region_model_manager mgr;
5900 tree int_star = build_pointer_type (integer_type_node);
5901 tree p = build_global_decl ("p", int_star);
5902 tree q = build_global_decl ("q", int_star);
5903
5904 test_region_model_context ctxt;
5905 region_model model (&mgr);
5906 const svalue *p_init = model.get_rvalue (p, &ctxt);
5907 const svalue *q_init = model.get_rvalue (q, &ctxt);
5908
5909 ASSERT_TRUE (p_init->involves_p (p_init));
5910 ASSERT_FALSE (p_init->involves_p (q_init));
5911
5912 const region *star_p_reg = mgr.get_symbolic_region (p_init);
5913 const region *star_q_reg = mgr.get_symbolic_region (q_init);
5914
5915 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
5916 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
5917
5918 ASSERT_TRUE (init_star_p->involves_p (p_init));
5919 ASSERT_FALSE (p_init->involves_p (init_star_p));
5920 ASSERT_FALSE (init_star_p->involves_p (q_init));
5921 ASSERT_TRUE (init_star_q->involves_p (q_init));
5922 ASSERT_FALSE (init_star_q->involves_p (p_init));
5923}
5924
757bf1df
DM
5925/* Run all of the selftests within this file. */
5926
5927void
5928analyzer_region_model_cc_tests ()
5929{
8c08c983 5930 test_tree_cmp_on_constants ();
757bf1df 5931 test_dump ();
808f4dfe
DM
5932 test_struct ();
5933 test_array_1 ();
90f7c300 5934 test_get_representative_tree ();
757bf1df 5935 test_unique_constants ();
808f4dfe
DM
5936 test_unique_unknowns ();
5937 test_initial_svalue_folding ();
5938 test_unaryop_svalue_folding ();
5939 test_binop_svalue_folding ();
5940 test_sub_svalue_folding ();
5941 test_descendent_of_p ();
757bf1df 5942 test_assignment ();
a96f1c38 5943 test_compound_assignment ();
757bf1df
DM
5944 test_stack_frames ();
5945 test_get_representative_path_var ();
808f4dfe 5946 test_equality_1 ();
757bf1df
DM
5947 test_canonicalization_2 ();
5948 test_canonicalization_3 ();
8c08c983 5949 test_canonicalization_4 ();
757bf1df
DM
5950 test_state_merging ();
5951 test_constraint_merging ();
808f4dfe
DM
5952 test_widening_constraints ();
5953 test_iteration_1 ();
6969ac30 5954 test_malloc_constraints ();
808f4dfe
DM
5955 test_var ();
5956 test_array_2 ();
5957 test_mem_ref ();
5958 test_POINTER_PLUS_EXPR_then_MEM_REF ();
5959 test_malloc ();
5960 test_alloca ();
71fc4655 5961 test_involves_p ();
757bf1df
DM
5962}
5963
5964} // namespace selftest
5965
5966#endif /* CHECKING_P */
5967
75038aa6
DM
5968} // namespace ana
5969
757bf1df 5970#endif /* #if ENABLE_ANALYZER */