]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
analyzer: consolidate duplicate code in region::calc_offset
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df 1/* Classes for modeling the state of memory.
7adcbafe 2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
757bf1df
DM
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
7892ff37 29#include "diagnostic-core.h"
757bf1df
DM
30#include "graphviz.h"
31#include "options.h"
32#include "cgraph.h"
33#include "tree-dfa.h"
34#include "stringpool.h"
35#include "convert.h"
36#include "target.h"
37#include "fold-const.h"
38#include "tree-pretty-print.h"
39#include "diagnostic-color.h"
40#include "diagnostic-metadata.h"
757bf1df 41#include "tristate.h"
ef7827b0 42#include "bitmap.h"
757bf1df
DM
43#include "selftest.h"
44#include "function.h"
809192e7 45#include "json.h"
757bf1df
DM
46#include "analyzer/analyzer.h"
47#include "analyzer/analyzer-logging.h"
48#include "ordered-hash-map.h"
49#include "options.h"
50#include "cgraph.h"
51#include "cfg.h"
52#include "digraph.h"
53#include "analyzer/supergraph.h"
54#include "sbitmap.h"
808f4dfe
DM
55#include "analyzer/call-string.h"
56#include "analyzer/program-point.h"
57#include "analyzer/store.h"
757bf1df
DM
58#include "analyzer/region-model.h"
59#include "analyzer/constraint-manager.h"
60#include "diagnostic-event-id.h"
61#include "analyzer/sm.h"
62#include "diagnostic-event-id.h"
63#include "analyzer/sm.h"
64#include "analyzer/pending-diagnostic.h"
808f4dfe 65#include "analyzer/region-model-reachability.h"
757bf1df 66#include "analyzer/analyzer-selftests.h"
f573d351 67#include "analyzer/program-state.h"
884d9141 68#include "stor-layout.h"
c7e276b8 69#include "attribs.h"
9a2c9579 70#include "tree-object-size.h"
757bf1df
DM
71
72#if ENABLE_ANALYZER
73
75038aa6
DM
74namespace ana {
75
757bf1df
DM
76/* Dump T to PP in language-independent form, for debugging/logging/dumping
77 purposes. */
78
757bf1df 79void
808f4dfe 80dump_tree (pretty_printer *pp, tree t)
757bf1df 81{
808f4dfe 82 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
757bf1df
DM
83}
84
808f4dfe
DM
85/* Dump T to PP in language-independent form in quotes, for
86 debugging/logging/dumping purposes. */
757bf1df
DM
87
88void
808f4dfe 89dump_quoted_tree (pretty_printer *pp, tree t)
757bf1df 90{
808f4dfe
DM
91 pp_begin_quote (pp, pp_show_color (pp));
92 dump_tree (pp, t);
93 pp_end_quote (pp, pp_show_color (pp));
757bf1df
DM
94}
95
808f4dfe
DM
96/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
97 calls within other pp_printf calls.
757bf1df 98
808f4dfe
DM
99 default_tree_printer handles 'T' and some other codes by calling
100 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
101 dump_generic_node calls pp_printf in various places, leading to
102 garbled output.
757bf1df 103
808f4dfe
DM
104 Ideally pp_printf could be made to be reentrant, but in the meantime
105 this function provides a workaround. */
6969ac30
DM
106
107void
808f4dfe 108print_quoted_type (pretty_printer *pp, tree t)
6969ac30 109{
808f4dfe
DM
110 pp_begin_quote (pp, pp_show_color (pp));
111 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
112 pp_end_quote (pp, pp_show_color (pp));
6969ac30
DM
113}
114
d726a57b
DM
115/* class region_to_value_map. */
116
117/* Assignment operator for region_to_value_map. */
118
119region_to_value_map &
120region_to_value_map::operator= (const region_to_value_map &other)
121{
122 m_hash_map.empty ();
123 for (auto iter : other.m_hash_map)
124 {
125 const region *reg = iter.first;
126 const svalue *sval = iter.second;
127 m_hash_map.put (reg, sval);
128 }
129 return *this;
130}
131
132/* Equality operator for region_to_value_map. */
133
134bool
135region_to_value_map::operator== (const region_to_value_map &other) const
136{
137 if (m_hash_map.elements () != other.m_hash_map.elements ())
138 return false;
139
140 for (auto iter : *this)
141 {
142 const region *reg = iter.first;
143 const svalue *sval = iter.second;
144 const svalue * const *other_slot = other.get (reg);
145 if (other_slot == NULL)
146 return false;
147 if (sval != *other_slot)
148 return false;
149 }
150
151 return true;
152}
153
154/* Dump this object to PP. */
155
156void
157region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
158 bool multiline) const
159{
160 auto_vec<const region *> regs;
161 for (iterator iter = begin (); iter != end (); ++iter)
162 regs.safe_push ((*iter).first);
163 regs.qsort (region::cmp_ptr_ptr);
164 if (multiline)
165 pp_newline (pp);
166 else
167 pp_string (pp, " {");
168 unsigned i;
169 const region *reg;
170 FOR_EACH_VEC_ELT (regs, i, reg)
171 {
172 if (multiline)
173 pp_string (pp, " ");
174 else if (i > 0)
175 pp_string (pp, ", ");
176 reg->dump_to_pp (pp, simple);
177 pp_string (pp, ": ");
178 const svalue *sval = *get (reg);
179 sval->dump_to_pp (pp, true);
180 if (multiline)
181 pp_newline (pp);
182 }
183 if (!multiline)
184 pp_string (pp, "}");
185}
186
187/* Dump this object to stderr. */
188
189DEBUG_FUNCTION void
190region_to_value_map::dump (bool simple) const
191{
192 pretty_printer pp;
193 pp_format_decoder (&pp) = default_tree_printer;
194 pp_show_color (&pp) = pp_show_color (global_dc->printer);
195 pp.buffer->stream = stderr;
196 dump_to_pp (&pp, simple, true);
197 pp_newline (&pp);
198 pp_flush (&pp);
199}
200
201
202/* Attempt to merge THIS with OTHER, writing the result
203 to OUT.
204
205 For now, write (region, value) mappings that are in common between THIS
206 and OTHER to OUT, effectively taking the intersection, rather than
207 rejecting differences. */
208
209bool
210region_to_value_map::can_merge_with_p (const region_to_value_map &other,
211 region_to_value_map *out) const
212{
213 for (auto iter : *this)
214 {
215 const region *iter_reg = iter.first;
216 const svalue *iter_sval = iter.second;
217 const svalue * const * other_slot = other.get (iter_reg);
218 if (other_slot)
219 if (iter_sval == *other_slot)
220 out->put (iter_reg, iter_sval);
221 }
222 return true;
223}
224
33255ad3
DM
225/* Purge any state involving SVAL. */
226
227void
228region_to_value_map::purge_state_involving (const svalue *sval)
229{
230 auto_vec<const region *> to_purge;
231 for (auto iter : *this)
232 {
233 const region *iter_reg = iter.first;
234 const svalue *iter_sval = iter.second;
235 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
236 to_purge.safe_push (iter_reg);
237 }
238 for (auto iter : to_purge)
239 m_hash_map.remove (iter);
240}
241
757bf1df
DM
242/* class region_model. */
243
808f4dfe 244/* Ctor for region_model: construct an "empty" model. */
757bf1df 245
808f4dfe 246region_model::region_model (region_model_manager *mgr)
9a2c9579
DM
247: m_mgr (mgr), m_store (), m_current_frame (NULL),
248 m_dynamic_extents ()
757bf1df 249{
808f4dfe 250 m_constraints = new constraint_manager (mgr);
757bf1df
DM
251}
252
253/* region_model's copy ctor. */
254
255region_model::region_model (const region_model &other)
808f4dfe
DM
256: m_mgr (other.m_mgr), m_store (other.m_store),
257 m_constraints (new constraint_manager (*other.m_constraints)),
9a2c9579
DM
258 m_current_frame (other.m_current_frame),
259 m_dynamic_extents (other.m_dynamic_extents)
757bf1df 260{
757bf1df
DM
261}
262
263/* region_model's dtor. */
264
265region_model::~region_model ()
266{
267 delete m_constraints;
268}
269
270/* region_model's assignment operator. */
271
272region_model &
273region_model::operator= (const region_model &other)
274{
808f4dfe
DM
275 /* m_mgr is const. */
276 gcc_assert (m_mgr == other.m_mgr);
757bf1df 277
808f4dfe 278 m_store = other.m_store;
757bf1df
DM
279
280 delete m_constraints;
808f4dfe 281 m_constraints = new constraint_manager (*other.m_constraints);
757bf1df 282
808f4dfe 283 m_current_frame = other.m_current_frame;
757bf1df 284
9a2c9579
DM
285 m_dynamic_extents = other.m_dynamic_extents;
286
757bf1df
DM
287 return *this;
288}
289
290/* Equality operator for region_model.
291
808f4dfe
DM
292 Amongst other things this directly compares the stores and the constraint
293 managers, so for this to be meaningful both this and OTHER should
757bf1df
DM
294 have been canonicalized. */
295
296bool
297region_model::operator== (const region_model &other) const
298{
808f4dfe
DM
299 /* We can only compare instances that use the same manager. */
300 gcc_assert (m_mgr == other.m_mgr);
757bf1df 301
808f4dfe 302 if (m_store != other.m_store)
757bf1df
DM
303 return false;
304
305 if (*m_constraints != *other.m_constraints)
306 return false;
307
808f4dfe
DM
308 if (m_current_frame != other.m_current_frame)
309 return false;
757bf1df 310
9a2c9579
DM
311 if (m_dynamic_extents != other.m_dynamic_extents)
312 return false;
313
757bf1df
DM
314 gcc_checking_assert (hash () == other.hash ());
315
316 return true;
317}
318
319/* Generate a hash value for this region_model. */
320
321hashval_t
808f4dfe
DM
322region_model::hash () const
323{
324 hashval_t result = m_store.hash ();
325 result ^= m_constraints->hash ();
326 return result;
757bf1df
DM
327}
328
808f4dfe
DM
329/* Dump a representation of this model to PP, showing the
330 stack, the store, and any constraints.
331 Use SIMPLE to control how svalues and regions are printed. */
757bf1df
DM
332
333void
808f4dfe
DM
334region_model::dump_to_pp (pretty_printer *pp, bool simple,
335 bool multiline) const
757bf1df 336{
808f4dfe
DM
337 /* Dump stack. */
338 pp_printf (pp, "stack depth: %i", get_stack_depth ());
339 if (multiline)
340 pp_newline (pp);
341 else
342 pp_string (pp, " {");
343 for (const frame_region *iter_frame = m_current_frame; iter_frame;
344 iter_frame = iter_frame->get_calling_frame ())
345 {
346 if (multiline)
347 pp_string (pp, " ");
348 else if (iter_frame != m_current_frame)
349 pp_string (pp, ", ");
350 pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
351 iter_frame->dump_to_pp (pp, simple);
352 if (multiline)
353 pp_newline (pp);
354 }
355 if (!multiline)
356 pp_string (pp, "}");
357
358 /* Dump store. */
359 if (!multiline)
360 pp_string (pp, ", {");
361 m_store.dump_to_pp (pp, simple, multiline,
362 m_mgr->get_store_manager ());
363 if (!multiline)
364 pp_string (pp, "}");
365
366 /* Dump constraints. */
367 pp_string (pp, "constraint_manager:");
368 if (multiline)
369 pp_newline (pp);
370 else
371 pp_string (pp, " {");
372 m_constraints->dump_to_pp (pp, multiline);
373 if (!multiline)
374 pp_string (pp, "}");
9a2c9579
DM
375
376 /* Dump sizes of dynamic regions, if any are known. */
377 if (!m_dynamic_extents.is_empty ())
378 {
379 pp_string (pp, "dynamic_extents:");
380 m_dynamic_extents.dump_to_pp (pp, simple, multiline);
381 }
808f4dfe 382}
757bf1df 383
808f4dfe 384/* Dump a representation of this model to FILE. */
757bf1df 385
808f4dfe
DM
386void
387region_model::dump (FILE *fp, bool simple, bool multiline) const
388{
389 pretty_printer pp;
390 pp_format_decoder (&pp) = default_tree_printer;
391 pp_show_color (&pp) = pp_show_color (global_dc->printer);
392 pp.buffer->stream = fp;
393 dump_to_pp (&pp, simple, multiline);
394 pp_newline (&pp);
395 pp_flush (&pp);
757bf1df
DM
396}
397
808f4dfe 398/* Dump a multiline representation of this model to stderr. */
757bf1df 399
808f4dfe
DM
400DEBUG_FUNCTION void
401region_model::dump (bool simple) const
402{
403 dump (stderr, simple, true);
404}
757bf1df 405
808f4dfe 406/* Dump a multiline representation of this model to stderr. */
757bf1df 407
808f4dfe
DM
408DEBUG_FUNCTION void
409region_model::debug () const
757bf1df 410{
808f4dfe 411 dump (true);
757bf1df
DM
412}
413
e61ffa20
DM
414/* Assert that this object is valid. */
415
416void
417region_model::validate () const
418{
419 m_store.validate ();
420}
421
808f4dfe
DM
422/* Canonicalize the store and constraints, to maximize the chance of
423 equality between region_model instances. */
757bf1df
DM
424
425void
808f4dfe 426region_model::canonicalize ()
757bf1df 427{
808f4dfe
DM
428 m_store.canonicalize (m_mgr->get_store_manager ());
429 m_constraints->canonicalize ();
757bf1df
DM
430}
431
432/* Return true if this region_model is in canonical form. */
433
434bool
435region_model::canonicalized_p () const
436{
437 region_model copy (*this);
808f4dfe 438 copy.canonicalize ();
757bf1df
DM
439 return *this == copy;
440}
441
808f4dfe
DM
442/* See the comment for store::loop_replay_fixup. */
443
444void
445region_model::loop_replay_fixup (const region_model *dst_state)
446{
447 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
448}
449
757bf1df
DM
450/* A subclass of pending_diagnostic for complaining about uses of
451 poisoned values. */
452
453class poisoned_value_diagnostic
454: public pending_diagnostic_subclass<poisoned_value_diagnostic>
455{
456public:
00e7d024
DM
457 poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
458 const region *src_region)
459 : m_expr (expr), m_pkind (pkind),
460 m_src_region (src_region)
757bf1df
DM
461 {}
462
463 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }
464
33255ad3
DM
465 bool use_of_uninit_p () const FINAL OVERRIDE
466 {
467 return m_pkind == POISON_KIND_UNINIT;
468 }
469
757bf1df
DM
470 bool operator== (const poisoned_value_diagnostic &other) const
471 {
00e7d024
DM
472 return (m_expr == other.m_expr
473 && m_pkind == other.m_pkind
474 && m_src_region == other.m_src_region);
757bf1df
DM
475 }
476
477 bool emit (rich_location *rich_loc) FINAL OVERRIDE
478 {
479 switch (m_pkind)
480 {
481 default:
482 gcc_unreachable ();
33255ad3
DM
483 case POISON_KIND_UNINIT:
484 {
485 diagnostic_metadata m;
486 m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable". */
487 return warning_meta (rich_loc, m,
488 OPT_Wanalyzer_use_of_uninitialized_value,
489 "use of uninitialized value %qE",
490 m_expr);
491 }
492 break;
757bf1df
DM
493 case POISON_KIND_FREED:
494 {
495 diagnostic_metadata m;
496 m.add_cwe (416); /* "CWE-416: Use After Free". */
6c8e5844
DM
497 return warning_meta (rich_loc, m,
498 OPT_Wanalyzer_use_after_free,
499 "use after %<free%> of %qE",
500 m_expr);
757bf1df
DM
501 }
502 break;
503 case POISON_KIND_POPPED_STACK:
504 {
757bf1df 505 /* TODO: which CWE? */
808f4dfe
DM
506 return warning_at
507 (rich_loc,
508 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
509 "dereferencing pointer %qE to within stale stack frame",
510 m_expr);
757bf1df
DM
511 }
512 break;
513 }
514 }
515
516 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
517 {
518 switch (m_pkind)
519 {
520 default:
521 gcc_unreachable ();
33255ad3
DM
522 case POISON_KIND_UNINIT:
523 return ev.formatted_print ("use of uninitialized value %qE here",
524 m_expr);
757bf1df
DM
525 case POISON_KIND_FREED:
526 return ev.formatted_print ("use after %<free%> of %qE here",
527 m_expr);
528 case POISON_KIND_POPPED_STACK:
529 return ev.formatted_print
808f4dfe 530 ("dereferencing pointer %qE to within stale stack frame",
757bf1df
DM
531 m_expr);
532 }
533 }
534
00e7d024
DM
535 void mark_interesting_stuff (interesting_t *interest) FINAL OVERRIDE
536 {
537 if (m_src_region)
538 interest->add_region_creation (m_src_region);
539 }
540
757bf1df
DM
541private:
542 tree m_expr;
543 enum poison_kind m_pkind;
00e7d024 544 const region *m_src_region;
757bf1df
DM
545};
546
5e00ad3f
DM
547/* A subclass of pending_diagnostic for complaining about shifts
548 by negative counts. */
549
550class shift_count_negative_diagnostic
551: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
552{
553public:
554 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
555 : m_assign (assign), m_count_cst (count_cst)
556 {}
557
558 const char *get_kind () const FINAL OVERRIDE
559 {
560 return "shift_count_negative_diagnostic";
561 }
562
563 bool operator== (const shift_count_negative_diagnostic &other) const
564 {
565 return (m_assign == other.m_assign
566 && same_tree_p (m_count_cst, other.m_count_cst));
567 }
568
569 bool emit (rich_location *rich_loc) FINAL OVERRIDE
570 {
571 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_negative,
572 "shift by negative count (%qE)", m_count_cst);
573 }
574
575 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
576 {
577 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
578 }
579
580private:
581 const gassign *m_assign;
582 tree m_count_cst;
583};
584
585/* A subclass of pending_diagnostic for complaining about shifts
586 by counts >= the width of the operand type. */
587
588class shift_count_overflow_diagnostic
589: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
590{
591public:
592 shift_count_overflow_diagnostic (const gassign *assign,
593 int operand_precision,
594 tree count_cst)
595 : m_assign (assign), m_operand_precision (operand_precision),
596 m_count_cst (count_cst)
597 {}
598
599 const char *get_kind () const FINAL OVERRIDE
600 {
601 return "shift_count_overflow_diagnostic";
602 }
603
604 bool operator== (const shift_count_overflow_diagnostic &other) const
605 {
606 return (m_assign == other.m_assign
607 && m_operand_precision == other.m_operand_precision
608 && same_tree_p (m_count_cst, other.m_count_cst));
609 }
610
611 bool emit (rich_location *rich_loc) FINAL OVERRIDE
612 {
613 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_overflow,
614 "shift by count (%qE) >= precision of type (%qi)",
615 m_count_cst, m_operand_precision);
616 }
617
618 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
619 {
620 return ev.formatted_print ("shift by count %qE here", m_count_cst);
621 }
622
623private:
624 const gassign *m_assign;
625 int m_operand_precision;
626 tree m_count_cst;
627};
628
808f4dfe
DM
629/* If ASSIGN is a stmt that can be modelled via
630 set_value (lhs_reg, SVALUE, CTXT)
631 for some SVALUE, get the SVALUE.
632 Otherwise return NULL. */
757bf1df 633
808f4dfe
DM
634const svalue *
635region_model::get_gassign_result (const gassign *assign,
636 region_model_context *ctxt)
757bf1df
DM
637{
638 tree lhs = gimple_assign_lhs (assign);
639 tree rhs1 = gimple_assign_rhs1 (assign);
757bf1df
DM
640 enum tree_code op = gimple_assign_rhs_code (assign);
641 switch (op)
642 {
643 default:
808f4dfe 644 return NULL;
757bf1df
DM
645
646 case POINTER_PLUS_EXPR:
647 {
648 /* e.g. "_1 = a_10(D) + 12;" */
649 tree ptr = rhs1;
650 tree offset = gimple_assign_rhs2 (assign);
651
808f4dfe
DM
652 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
653 const svalue *offset_sval = get_rvalue (offset, ctxt);
654 /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
655 is an integer of type sizetype". */
656 offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);
657
658 const svalue *sval_binop
659 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
660 ptr_sval, offset_sval);
661 return sval_binop;
757bf1df
DM
662 }
663 break;
664
665 case POINTER_DIFF_EXPR:
666 {
667 /* e.g. "_1 = p_2(D) - q_3(D);". */
808f4dfe
DM
668 tree rhs2 = gimple_assign_rhs2 (assign);
669 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
670 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
757bf1df 671
808f4dfe 672 // TODO: perhaps fold to zero if they're known to be equal?
757bf1df 673
808f4dfe
DM
674 const svalue *sval_binop
675 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
676 rhs1_sval, rhs2_sval);
677 return sval_binop;
757bf1df
DM
678 }
679 break;
680
808f4dfe
DM
681 /* Assignments of the form
682 set_value (lvalue (LHS), rvalue (EXPR))
683 for various EXPR.
684 We already have the lvalue for the LHS above, as "lhs_reg". */
685 case ADDR_EXPR: /* LHS = &RHS; */
686 case BIT_FIELD_REF:
687 case COMPONENT_REF: /* LHS = op0.op1; */
757bf1df 688 case MEM_REF:
757bf1df 689 case REAL_CST:
808f4dfe
DM
690 case COMPLEX_CST:
691 case VECTOR_CST:
757bf1df
DM
692 case INTEGER_CST:
693 case ARRAY_REF:
808f4dfe
DM
694 case SSA_NAME: /* LHS = VAR; */
695 case VAR_DECL: /* LHS = VAR; */
696 case PARM_DECL:/* LHS = VAR; */
697 case REALPART_EXPR:
698 case IMAGPART_EXPR:
699 return get_rvalue (rhs1, ctxt);
700
701 case ABS_EXPR:
702 case ABSU_EXPR:
703 case CONJ_EXPR:
704 case BIT_NOT_EXPR:
757bf1df
DM
705 case FIX_TRUNC_EXPR:
706 case FLOAT_EXPR:
808f4dfe 707 case NEGATE_EXPR:
757bf1df 708 case NOP_EXPR:
808f4dfe 709 case VIEW_CONVERT_EXPR:
757bf1df 710 {
808f4dfe
DM
711 /* Unary ops. */
712 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
713 const svalue *sval_unaryop
714 = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
715 return sval_unaryop;
757bf1df 716 }
757bf1df
DM
717
718 case EQ_EXPR:
719 case GE_EXPR:
720 case LE_EXPR:
721 case NE_EXPR:
722 case GT_EXPR:
723 case LT_EXPR:
808f4dfe
DM
724 case UNORDERED_EXPR:
725 case ORDERED_EXPR:
757bf1df
DM
726 {
727 tree rhs2 = gimple_assign_rhs2 (assign);
728
808f4dfe
DM
729 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
730 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
757bf1df 731
2f5951bd 732 if (TREE_TYPE (lhs) == boolean_type_node)
808f4dfe 733 {
2f5951bd
DM
734 /* Consider constraints between svalues. */
735 tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
736 if (t.is_known ())
737 return m_mgr->get_or_create_constant_svalue
738 (t.is_true () ? boolean_true_node : boolean_false_node);
808f4dfe 739 }
2f5951bd
DM
740
741 /* Otherwise, generate a symbolic binary op. */
742 const svalue *sval_binop
743 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
744 rhs1_sval, rhs2_sval);
745 return sval_binop;
757bf1df
DM
746 }
747 break;
748
749 case PLUS_EXPR:
750 case MINUS_EXPR:
751 case MULT_EXPR:
808f4dfe 752 case MULT_HIGHPART_EXPR:
757bf1df 753 case TRUNC_DIV_EXPR:
808f4dfe
DM
754 case CEIL_DIV_EXPR:
755 case FLOOR_DIV_EXPR:
756 case ROUND_DIV_EXPR:
757bf1df 757 case TRUNC_MOD_EXPR:
808f4dfe
DM
758 case CEIL_MOD_EXPR:
759 case FLOOR_MOD_EXPR:
760 case ROUND_MOD_EXPR:
761 case RDIV_EXPR:
762 case EXACT_DIV_EXPR:
757bf1df
DM
763 case LSHIFT_EXPR:
764 case RSHIFT_EXPR:
808f4dfe
DM
765 case LROTATE_EXPR:
766 case RROTATE_EXPR:
757bf1df
DM
767 case BIT_IOR_EXPR:
768 case BIT_XOR_EXPR:
769 case BIT_AND_EXPR:
770 case MIN_EXPR:
771 case MAX_EXPR:
808f4dfe 772 case COMPLEX_EXPR:
757bf1df
DM
773 {
774 /* Binary ops. */
775 tree rhs2 = gimple_assign_rhs2 (assign);
776
808f4dfe
DM
777 const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
778 const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);
757bf1df 779
5e00ad3f
DM
780 if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
781 {
782 /* "INT34-C. Do not shift an expression by a negative number of bits
783 or by greater than or equal to the number of bits that exist in
784 the operand." */
785 if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
786 if (TREE_CODE (rhs2_cst) == INTEGER_CST)
787 {
788 if (tree_int_cst_sgn (rhs2_cst) < 0)
789 ctxt->warn (new shift_count_negative_diagnostic
790 (assign, rhs2_cst));
791 else if (compare_tree_int (rhs2_cst,
792 TYPE_PRECISION (TREE_TYPE (rhs1)))
793 >= 0)
794 ctxt->warn (new shift_count_overflow_diagnostic
795 (assign, TYPE_PRECISION (TREE_TYPE (rhs1)),
796 rhs2_cst));
797 }
798 }
799
808f4dfe
DM
800 const svalue *sval_binop
801 = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
802 rhs1_sval, rhs2_sval);
803 return sval_binop;
804 }
805
806 /* Vector expressions. In theory we could implement these elementwise,
807 but for now, simply return unknown values. */
808 case VEC_DUPLICATE_EXPR:
809 case VEC_SERIES_EXPR:
810 case VEC_COND_EXPR:
811 case VEC_PERM_EXPR:
1b0be822
DM
812 case VEC_WIDEN_MULT_HI_EXPR:
813 case VEC_WIDEN_MULT_LO_EXPR:
814 case VEC_WIDEN_MULT_EVEN_EXPR:
815 case VEC_WIDEN_MULT_ODD_EXPR:
816 case VEC_UNPACK_HI_EXPR:
817 case VEC_UNPACK_LO_EXPR:
818 case VEC_UNPACK_FLOAT_HI_EXPR:
819 case VEC_UNPACK_FLOAT_LO_EXPR:
820 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
821 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
822 case VEC_PACK_TRUNC_EXPR:
823 case VEC_PACK_SAT_EXPR:
824 case VEC_PACK_FIX_TRUNC_EXPR:
825 case VEC_PACK_FLOAT_EXPR:
826 case VEC_WIDEN_LSHIFT_HI_EXPR:
827 case VEC_WIDEN_LSHIFT_LO_EXPR:
808f4dfe
DM
828 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
829 }
830}
831
33255ad3
DM
832/* Check for SVAL being poisoned, adding a warning to CTXT.
833 Return SVAL, or, if a warning is added, another value, to avoid
834 repeatedly complaining about the same poisoned value in followup code. */
835
836const svalue *
837region_model::check_for_poison (const svalue *sval,
838 tree expr,
839 region_model_context *ctxt) const
840{
841 if (!ctxt)
842 return sval;
843
844 if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
845 {
846 /* If we have an SSA name for a temporary, we don't want to print
847 '<unknown>'.
848 Poisoned values are shared by type, and so we can't reconstruct
849 the tree other than via the def stmts, using
850 fixup_tree_for_diagnostic. */
851 tree diag_arg = fixup_tree_for_diagnostic (expr);
852 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
00e7d024
DM
853 const region *src_region = NULL;
854 if (pkind == POISON_KIND_UNINIT)
855 src_region = get_region_for_poisoned_expr (expr);
856 if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind,
857 src_region)))
33255ad3
DM
858 {
859 /* We only want to report use of a poisoned value at the first
860 place it gets used; return an unknown value to avoid generating
861 a chain of followup warnings. */
862 sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
863 }
864
865 return sval;
866 }
867
868 return sval;
869}
870
00e7d024
DM
871/* Attempt to get a region for describing EXPR, the source of region of
872 a poisoned_svalue for use in a poisoned_value_diagnostic.
873 Return NULL if there is no good region to use. */
874
875const region *
876region_model::get_region_for_poisoned_expr (tree expr) const
877{
878 if (TREE_CODE (expr) == SSA_NAME)
879 {
880 tree decl = SSA_NAME_VAR (expr);
881 if (decl && DECL_P (decl))
882 expr = decl;
883 else
884 return NULL;
885 }
886 return get_lvalue (expr, NULL);
887}
888
808f4dfe
DM
889/* Update this model for the ASSIGN stmt, using CTXT to report any
890 diagnostics. */
891
892void
893region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
894{
895 tree lhs = gimple_assign_lhs (assign);
896 tree rhs1 = gimple_assign_rhs1 (assign);
897
898 const region *lhs_reg = get_lvalue (lhs, ctxt);
899
900 /* Most assignments are handled by:
901 set_value (lhs_reg, SVALUE, CTXT)
902 for some SVALUE. */
903 if (const svalue *sval = get_gassign_result (assign, ctxt))
904 {
33255ad3
DM
905 tree expr = get_diagnostic_tree_for_gassign (assign);
906 check_for_poison (sval, expr, ctxt);
808f4dfe
DM
907 set_value (lhs_reg, sval, ctxt);
908 return;
909 }
910
911 enum tree_code op = gimple_assign_rhs_code (assign);
912 switch (op)
913 {
914 default:
915 {
1b0be822 916 if (0)
808f4dfe
DM
917 sorry_at (assign->location, "unhandled assignment op: %qs",
918 get_tree_code_name (op));
1b0be822
DM
919 const svalue *unknown_sval
920 = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
921 set_value (lhs_reg, unknown_sval, ctxt);
757bf1df
DM
922 }
923 break;
924
808f4dfe
DM
925 case CONSTRUCTOR:
926 {
927 if (TREE_CLOBBER_P (rhs1))
928 {
929 /* e.g. "x ={v} {CLOBBER};" */
930 clobber_region (lhs_reg);
931 }
932 else
933 {
934 /* Any CONSTRUCTOR that survives to this point is either
935 just a zero-init of everything, or a vector. */
936 if (!CONSTRUCTOR_NO_CLEARING (rhs1))
937 zero_fill_region (lhs_reg);
938 unsigned ix;
939 tree index;
940 tree val;
941 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
942 {
943 gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
944 if (!index)
945 index = build_int_cst (integer_type_node, ix);
946 gcc_assert (TREE_CODE (index) == INTEGER_CST);
947 const svalue *index_sval
948 = m_mgr->get_or_create_constant_svalue (index);
949 gcc_assert (index_sval);
950 const region *sub_reg
951 = m_mgr->get_element_region (lhs_reg,
952 TREE_TYPE (val),
953 index_sval);
954 const svalue *val_sval = get_rvalue (val, ctxt);
955 set_value (sub_reg, val_sval, ctxt);
956 }
957 }
958 }
959 break;
960
961 case STRING_CST:
757bf1df 962 {
808f4dfe 963 /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};". */
808f4dfe
DM
964 const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
965 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
e61ffa20 966 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
967 }
968 break;
969 }
970}
971
33255ad3
DM
972/* A pending_diagnostic subclass for implementing "__analyzer_dump_path". */
973
974class dump_path_diagnostic
975 : public pending_diagnostic_subclass<dump_path_diagnostic>
976{
977public:
978 bool emit (rich_location *richloc) FINAL OVERRIDE
979 {
980 inform (richloc, "path");
981 return true;
982 }
983
984 const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; }
985
986 bool operator== (const dump_path_diagnostic &) const
987 {
988 return true;
989 }
990};
991
992/* Handle the pre-sm-state part of STMT, modifying this object in-place.
993 Write true to *OUT_TERMINATE_PATH if the path should be terminated.
994 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
995 side effects. */
996
997void
998region_model::on_stmt_pre (const gimple *stmt,
999 bool *out_terminate_path,
1000 bool *out_unknown_side_effects,
1001 region_model_context *ctxt)
1002{
1003 switch (gimple_code (stmt))
1004 {
1005 default:
1006 /* No-op for now. */
1007 break;
1008
1009 case GIMPLE_ASSIGN:
1010 {
1011 const gassign *assign = as_a <const gassign *> (stmt);
1012 on_assignment (assign, ctxt);
1013 }
1014 break;
1015
1016 case GIMPLE_ASM:
ded2c2c0
DM
1017 {
1018 const gasm *asm_stmt = as_a <const gasm *> (stmt);
1019 on_asm_stmt (asm_stmt, ctxt);
1020 }
33255ad3
DM
1021 break;
1022
1023 case GIMPLE_CALL:
1024 {
1025 /* Track whether we have a gcall to a function that's not recognized by
1026 anything, for which we don't have a function body, or for which we
1027 don't know the fndecl. */
1028 const gcall *call = as_a <const gcall *> (stmt);
1029
1030 /* Debugging/test support. */
1031 if (is_special_named_call_p (call, "__analyzer_describe", 2))
1032 impl_call_analyzer_describe (call, ctxt);
1033 else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
1034 impl_call_analyzer_dump_capacity (call, ctxt);
4409152a
DM
1035 else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
1036 impl_call_analyzer_dump_escaped (call);
33255ad3
DM
1037 else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
1038 {
1039 /* Handle the builtin "__analyzer_dump_path" by queuing a
1040 diagnostic at this exploded_node. */
1041 ctxt->warn (new dump_path_diagnostic ());
1042 }
1043 else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
1044 0))
1045 {
1046 /* Handle the builtin "__analyzer_dump_region_model" by dumping
1047 the region model's state to stderr. */
1048 dump (false);
1049 }
1050 else if (is_special_named_call_p (call, "__analyzer_eval", 1))
1051 impl_call_analyzer_eval (call, ctxt);
1052 else if (is_special_named_call_p (call, "__analyzer_break", 0))
1053 {
1054 /* Handle the builtin "__analyzer_break" by triggering a
1055 breakpoint. */
1056 /* TODO: is there a good cross-platform way to do this? */
1057 raise (SIGINT);
1058 }
1059 else if (is_special_named_call_p (call,
1060 "__analyzer_dump_exploded_nodes",
1061 1))
1062 {
1063 /* This is handled elsewhere. */
1064 }
1065 else
1066 *out_unknown_side_effects = on_call_pre (call, ctxt,
1067 out_terminate_path);
1068 }
1069 break;
1070
1071 case GIMPLE_RETURN:
1072 {
1073 const greturn *return_ = as_a <const greturn *> (stmt);
1074 on_return (return_, ctxt);
1075 }
1076 break;
1077 }
1078}
1079
9ff3e236
DM
1080/* Ensure that all arguments at the call described by CD are checked
1081 for poisoned values, by calling get_rvalue on each argument. */
1082
1083void
1084region_model::check_call_args (const call_details &cd) const
1085{
1086 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1087 cd.get_arg_svalue (arg_idx);
1088}
1089
757bf1df
DM
1090/* Update this model for the CALL stmt, using CTXT to report any
1091 diagnostics - the first half.
1092
1093 Updates to the region_model that should be made *before* sm-states
1094 are updated are done here; other updates to the region_model are done
ef7827b0 1095 in region_model::on_call_post.
757bf1df 1096
ef7827b0
DM
1097 Return true if the function call has unknown side effects (it wasn't
1098 recognized and we don't have a body for it, or are unable to tell which
5ee4ba03
DM
1099 fndecl it is).
1100
1101 Write true to *OUT_TERMINATE_PATH if this execution path should be
1102 terminated (e.g. the function call terminates the process). */
ef7827b0
DM
1103
1104bool
5ee4ba03
DM
1105region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
1106 bool *out_terminate_path)
757bf1df 1107{
48e8a7a6
DM
1108 call_details cd (call, this, ctxt);
1109
ef7827b0
DM
1110 bool unknown_side_effects = false;
1111
9b4eee5f
DM
1112 /* Special-case for IFN_DEFERRED_INIT.
1113 We want to report uninitialized variables with -fanalyzer (treating
1114 -ftrivial-auto-var-init= as purely a mitigation feature).
1115 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
1116 lhs of the call, so that it is still uninitialized from the point of
1117 view of the analyzer. */
1118 if (gimple_call_internal_p (call)
1119 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1120 return false;
1121
33255ad3
DM
1122 /* Some of the cases below update the lhs of the call based on the
1123 return value, but not all. Provide a default value, which may
1124 get overwritten below. */
1125 if (tree lhs = gimple_call_lhs (call))
1126 {
1127 const region *lhs_region = get_lvalue (lhs, ctxt);
3a1d168e
DM
1128 const svalue *sval
1129 = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
1130 lhs_region);
1131 purge_state_involving (sval, ctxt);
1132 set_value (lhs_region, sval, ctxt);
33255ad3
DM
1133 }
1134
48e8a7a6 1135 if (gimple_call_internal_p (call))
757bf1df 1136 {
48e8a7a6
DM
1137 switch (gimple_call_internal_fn (call))
1138 {
1139 default:
1140 break;
1141 case IFN_BUILTIN_EXPECT:
b5081130
DM
1142 impl_call_builtin_expect (cd);
1143 return false;
37eb3ef4
DM
1144 case IFN_UBSAN_BOUNDS:
1145 return false;
48e8a7a6
DM
1146 }
1147 }
808f4dfe 1148
48e8a7a6
DM
1149 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1150 {
808f4dfe
DM
1151 /* The various impl_call_* member functions are implemented
1152 in region-model-impl-calls.cc.
1153 Having them split out into separate functions makes it easier
1154 to put breakpoints on the handling of specific functions. */
ee7bfbe5 1155
47997a32 1156 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
ee7bfbe5
DM
1157 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1158 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1159 {
1160 default:
b7028f06
DM
1161 if (!DECL_PURE_P (callee_fndecl))
1162 unknown_side_effects = true;
ee7bfbe5
DM
1163 break;
1164 case BUILT_IN_ALLOCA:
1165 case BUILT_IN_ALLOCA_WITH_ALIGN:
b5081130
DM
1166 impl_call_alloca (cd);
1167 return false;
ee7bfbe5 1168 case BUILT_IN_CALLOC:
b5081130
DM
1169 impl_call_calloc (cd);
1170 return false;
ee7bfbe5
DM
1171 case BUILT_IN_EXPECT:
1172 case BUILT_IN_EXPECT_WITH_PROBABILITY:
b5081130
DM
1173 impl_call_builtin_expect (cd);
1174 return false;
ee7bfbe5
DM
1175 case BUILT_IN_FREE:
1176 /* Handle in "on_call_post". */
1177 break;
1178 case BUILT_IN_MALLOC:
b5081130
DM
1179 impl_call_malloc (cd);
1180 return false;
b7028f06
DM
1181 case BUILT_IN_MEMCPY:
1182 case BUILT_IN_MEMCPY_CHK:
1183 impl_call_memcpy (cd);
1184 return false;
ee7bfbe5 1185 case BUILT_IN_MEMSET:
bc62bfb0 1186 case BUILT_IN_MEMSET_CHK:
ee7bfbe5
DM
1187 impl_call_memset (cd);
1188 return false;
1189 break;
a6baafca 1190 case BUILT_IN_REALLOC:
a6baafca 1191 return false;
111fd515
DM
1192 case BUILT_IN_STRCHR:
1193 impl_call_strchr (cd);
1194 return false;
b7028f06
DM
1195 case BUILT_IN_STRCPY:
1196 case BUILT_IN_STRCPY_CHK:
1197 impl_call_strcpy (cd);
1198 return false;
ee7bfbe5 1199 case BUILT_IN_STRLEN:
b5081130
DM
1200 impl_call_strlen (cd);
1201 return false;
b7028f06 1202
37eb3ef4
DM
1203 case BUILT_IN_STACK_SAVE:
1204 case BUILT_IN_STACK_RESTORE:
1205 return false;
1206
b7028f06
DM
1207 /* Stdio builtins. */
1208 case BUILT_IN_FPRINTF:
1209 case BUILT_IN_FPRINTF_UNLOCKED:
1210 case BUILT_IN_PUTC:
1211 case BUILT_IN_PUTC_UNLOCKED:
1212 case BUILT_IN_FPUTC:
1213 case BUILT_IN_FPUTC_UNLOCKED:
1214 case BUILT_IN_FPUTS:
1215 case BUILT_IN_FPUTS_UNLOCKED:
1216 case BUILT_IN_FWRITE:
1217 case BUILT_IN_FWRITE_UNLOCKED:
1218 case BUILT_IN_PRINTF:
1219 case BUILT_IN_PRINTF_UNLOCKED:
1220 case BUILT_IN_PUTCHAR:
1221 case BUILT_IN_PUTCHAR_UNLOCKED:
1222 case BUILT_IN_PUTS:
1223 case BUILT_IN_PUTS_UNLOCKED:
1224 case BUILT_IN_VFPRINTF:
1225 case BUILT_IN_VPRINTF:
1226 /* These stdio builtins have external effects that are out
1227 of scope for the analyzer: we only want to model the effects
1228 on the return value. */
9ff3e236 1229 check_call_args (cd);
b7028f06 1230 break;
ee7bfbe5 1231 }
ee7bfbe5 1232 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
b5081130
DM
1233 {
1234 impl_call_malloc (cd);
1235 return false;
1236 }
808f4dfe 1237 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
b5081130
DM
1238 {
1239 impl_call_calloc (cd);
1240 return false;
1241 }
ee7bfbe5 1242 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
b5081130
DM
1243 {
1244 impl_call_alloca (cd);
1245 return false;
1246 }
a6baafca
DM
1247 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
1248 {
1249 impl_call_realloc (cd);
1250 return false;
1251 }
5ee4ba03
DM
1252 else if (is_named_call_p (callee_fndecl, "error"))
1253 {
1254 if (impl_call_error (cd, 3, out_terminate_path))
1255 return false;
1256 else
1257 unknown_side_effects = true;
1258 }
1259 else if (is_named_call_p (callee_fndecl, "error_at_line"))
1260 {
1261 if (impl_call_error (cd, 5, out_terminate_path))
1262 return false;
1263 else
1264 unknown_side_effects = true;
1265 }
33255ad3
DM
1266 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
1267 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
1268 {
1269 impl_call_fgets (cd);
1270 return false;
1271 }
1272 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
1273 {
1274 impl_call_fread (cd);
1275 return false;
1276 }
e097c9ab
DM
1277 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
1278 {
1279 /* No side-effects (tracking stream state is out-of-scope
1280 for the analyzer). */
1281 }
1e19ecd7
DM
1282 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
1283 && POINTER_TYPE_P (cd.get_arg_type (0)))
e516294a 1284 {
808f4dfe 1285 impl_call_memset (cd);
e516294a
DM
1286 return false;
1287 }
111fd515
DM
1288 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
1289 && POINTER_TYPE_P (cd.get_arg_type (0)))
1290 {
1291 impl_call_strchr (cd);
1292 return false;
1293 }
1e19ecd7
DM
1294 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
1295 && POINTER_TYPE_P (cd.get_arg_type (0)))
757bf1df 1296 {
b5081130
DM
1297 impl_call_strlen (cd);
1298 return false;
757bf1df 1299 }
1690a839 1300 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
b5081130
DM
1301 {
1302 impl_call_operator_new (cd);
1303 return false;
1304 }
1690a839 1305 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
b5081130
DM
1306 {
1307 impl_call_operator_new (cd);
1308 return false;
1309 }
1690a839
DM
1310 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1311 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1312 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1313 {
1314 /* Handle in "on_call_post". */
1315 }
ef7827b0 1316 else if (!fndecl_has_gimple_body_p (callee_fndecl)
808f4dfe
DM
1317 && !DECL_PURE_P (callee_fndecl)
1318 && !fndecl_built_in_p (callee_fndecl))
ef7827b0 1319 unknown_side_effects = true;
757bf1df 1320 }
ef7827b0
DM
1321 else
1322 unknown_side_effects = true;
757bf1df 1323
ef7827b0 1324 return unknown_side_effects;
757bf1df
DM
1325}
1326
1327/* Update this model for the CALL stmt, using CTXT to report any
1328 diagnostics - the second half.
1329
1330 Updates to the region_model that should be made *after* sm-states
1331 are updated are done here; other updates to the region_model are done
ef7827b0
DM
1332 in region_model::on_call_pre.
1333
1334 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1335 to purge state. */
757bf1df
DM
1336
1337void
ef7827b0
DM
1338region_model::on_call_post (const gcall *call,
1339 bool unknown_side_effects,
1340 region_model_context *ctxt)
757bf1df 1341{
757bf1df 1342 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1690a839 1343 {
eafa9d96 1344 call_details cd (call, this, ctxt);
1690a839
DM
1345 if (is_named_call_p (callee_fndecl, "free", call, 1))
1346 {
1690a839
DM
1347 impl_call_free (cd);
1348 return;
1349 }
1350 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1351 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1352 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1353 {
1690a839
DM
1354 impl_call_operator_delete (cd);
1355 return;
1356 }
c7e276b8
DM
1357 /* Was this fndecl referenced by
1358 __attribute__((malloc(FOO)))? */
1359 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
1360 {
c7e276b8
DM
1361 impl_deallocation_call (cd);
1362 return;
1363 }
eafa9d96
DM
1364 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
1365 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1366 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1367 {
1368 default:
1369 break;
1370 case BUILT_IN_REALLOC:
1371 impl_call_realloc (cd);
1372 return;
1373 }
1690a839 1374 }
ef7827b0
DM
1375
1376 if (unknown_side_effects)
1377 handle_unrecognized_call (call, ctxt);
1378}
1379
33255ad3
DM
1380/* Purge state involving SVAL from this region_model, using CTXT
1381 (if non-NULL) to purge other state in a program_state.
1382
1383 For example, if we're at the def-stmt of an SSA name, then we need to
1384 purge any state for svalues that involve that SSA name. This avoids
1385 false positives in loops, since a symbolic value referring to the
1386 SSA name will be referring to the previous value of that SSA name.
1387
1388 For example, in:
1389 while ((e = hashmap_iter_next(&iter))) {
1390 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1391 free (e_strbuf->value);
1392 }
1393 at the def-stmt of e_8:
1394 e_8 = hashmap_iter_next (&iter);
1395 we should purge the "freed" state of:
1396 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1397 which is the "e_strbuf->value" value from the previous iteration,
1398 or we will erroneously report a double-free - the "e_8" within it
1399 refers to the previous value. */
1400
1401void
1402region_model::purge_state_involving (const svalue *sval,
1403 region_model_context *ctxt)
1404{
a113b143
DM
1405 if (!sval->can_have_associated_state_p ())
1406 return;
33255ad3
DM
1407 m_store.purge_state_involving (sval, m_mgr);
1408 m_constraints->purge_state_involving (sval);
1409 m_dynamic_extents.purge_state_involving (sval);
1410 if (ctxt)
1411 ctxt->purge_state_involving (sval);
1412}
1413
ef7827b0
DM
1414/* Handle a call CALL to a function with unknown behavior.
1415
1416 Traverse the regions in this model, determining what regions are
1417 reachable from pointer arguments to CALL and from global variables,
1418 recursively.
1419
1420 Set all reachable regions to new unknown values and purge sm-state
1421 from their values, and from values that point to them. */
1422
1423void
1424region_model::handle_unrecognized_call (const gcall *call,
1425 region_model_context *ctxt)
1426{
1427 tree fndecl = get_fndecl_for_call (call, ctxt);
1428
c710051a 1429 reachable_regions reachable_regs (this);
ef7827b0
DM
1430
1431 /* Determine the reachable regions and their mutability. */
1432 {
808f4dfe
DM
1433 /* Add globals and regions that already escaped in previous
1434 unknown calls. */
1435 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1436 &reachable_regs);
ef7827b0
DM
1437
1438 /* Params that are pointers. */
1439 tree iter_param_types = NULL_TREE;
1440 if (fndecl)
1441 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1442 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1443 {
1444 /* Track expected param type, where available. */
1445 tree param_type = NULL_TREE;
1446 if (iter_param_types)
1447 {
1448 param_type = TREE_VALUE (iter_param_types);
1449 gcc_assert (param_type);
1450 iter_param_types = TREE_CHAIN (iter_param_types);
1451 }
1452
1453 tree parm = gimple_call_arg (call, arg_idx);
808f4dfe
DM
1454 const svalue *parm_sval = get_rvalue (parm, ctxt);
1455 reachable_regs.handle_parm (parm_sval, param_type);
ef7827b0
DM
1456 }
1457 }
1458
33255ad3 1459 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
3a66c289 1460
808f4dfe
DM
1461 /* Purge sm-state for the svalues that were reachable,
1462 both in non-mutable and mutable form. */
1463 for (svalue_set::iterator iter
1464 = reachable_regs.begin_reachable_svals ();
1465 iter != reachable_regs.end_reachable_svals (); ++iter)
ef7827b0 1466 {
808f4dfe 1467 const svalue *sval = (*iter);
33255ad3
DM
1468 if (ctxt)
1469 ctxt->on_unknown_change (sval, false);
808f4dfe
DM
1470 }
1471 for (svalue_set::iterator iter
1472 = reachable_regs.begin_mutable_svals ();
1473 iter != reachable_regs.end_mutable_svals (); ++iter)
1474 {
1475 const svalue *sval = (*iter);
33255ad3
DM
1476 if (ctxt)
1477 ctxt->on_unknown_change (sval, true);
3a66c289
DM
1478 if (uncertainty)
1479 uncertainty->on_mutable_sval_at_unknown_call (sval);
808f4dfe 1480 }
ef7827b0 1481
808f4dfe 1482 /* Mark any clusters that have escaped. */
af66094d 1483 reachable_regs.mark_escaped_clusters (ctxt);
ef7827b0 1484
808f4dfe
DM
1485 /* Update bindings for all clusters that have escaped, whether above,
1486 or previously. */
1487 m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());
9a2c9579
DM
1488
1489 /* Purge dynamic extents from any regions that have escaped mutably:
1490 realloc could have been called on them. */
1491 for (hash_set<const region *>::iterator
1492 iter = reachable_regs.begin_mutable_base_regs ();
1493 iter != reachable_regs.end_mutable_base_regs ();
1494 ++iter)
1495 {
1496 const region *base_reg = (*iter);
1497 unset_dynamic_extents (base_reg);
1498 }
808f4dfe 1499}
ef7827b0 1500
808f4dfe
DM
1501/* Traverse the regions in this model, determining what regions are
1502 reachable from the store and populating *OUT.
ef7827b0 1503
808f4dfe
DM
1504 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1505 for reachability (for handling return values from functions when
1506 analyzing return of the only function on the stack).
1507
3a66c289
DM
1508 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1509 within it as being maybe-bound as additional "roots" for reachability.
1510
808f4dfe
DM
1511 Find svalues that haven't leaked. */
1512
1513void
1514region_model::get_reachable_svalues (svalue_set *out,
3a66c289
DM
1515 const svalue *extra_sval,
1516 const uncertainty_t *uncertainty)
808f4dfe 1517{
c710051a 1518 reachable_regions reachable_regs (this);
808f4dfe
DM
1519
1520 /* Add globals and regions that already escaped in previous
1521 unknown calls. */
1522 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1523 &reachable_regs);
1524
1525 if (extra_sval)
1526 reachable_regs.handle_sval (extra_sval);
ef7827b0 1527
3a66c289
DM
1528 if (uncertainty)
1529 for (uncertainty_t::iterator iter
1530 = uncertainty->begin_maybe_bound_svals ();
1531 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1532 reachable_regs.handle_sval (*iter);
1533
808f4dfe
DM
1534 /* Get regions for locals that have explicitly bound values. */
1535 for (store::cluster_map_t::iterator iter = m_store.begin ();
1536 iter != m_store.end (); ++iter)
1537 {
1538 const region *base_reg = (*iter).first;
1539 if (const region *parent = base_reg->get_parent_region ())
1540 if (parent->get_kind () == RK_FRAME)
1541 reachable_regs.add (base_reg, false);
1542 }
1543
1544 /* Populate *OUT based on the values that were reachable. */
1545 for (svalue_set::iterator iter
1546 = reachable_regs.begin_reachable_svals ();
1547 iter != reachable_regs.end_reachable_svals (); ++iter)
1548 out->add (*iter);
757bf1df
DM
1549}
1550
1551/* Update this model for the RETURN_STMT, using CTXT to report any
1552 diagnostics. */
1553
1554void
1555region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1556{
1557 tree callee = get_current_function ()->decl;
1558 tree lhs = DECL_RESULT (callee);
1559 tree rhs = gimple_return_retval (return_stmt);
1560
1561 if (lhs && rhs)
a96f1c38 1562 copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt);
757bf1df
DM
1563}
1564
342e14ff
DM
1565/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1566 ENODE, using CTXT to report any diagnostics.
757bf1df 1567
342e14ff
DM
1568 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1569 0), as opposed to any second return due to longjmp/sigsetjmp. */
757bf1df
DM
1570
1571void
1572region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1573 region_model_context *ctxt)
1574{
808f4dfe
DM
1575 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1576 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1577 ctxt);
757bf1df 1578
808f4dfe
DM
1579 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1580 region. */
1581 if (buf_reg)
757bf1df 1582 {
fd9982bb 1583 setjmp_record r (enode, call);
808f4dfe
DM
1584 const svalue *sval
1585 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1586 set_value (buf_reg, sval, ctxt);
757bf1df
DM
1587 }
1588
1589 /* Direct calls to setjmp return 0. */
1590 if (tree lhs = gimple_call_lhs (call))
1591 {
1aff29d4
DM
1592 const svalue *new_sval
1593 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
808f4dfe
DM
1594 const region *lhs_reg = get_lvalue (lhs, ctxt);
1595 set_value (lhs_reg, new_sval, ctxt);
757bf1df
DM
1596 }
1597}
1598
1599/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1600 to a "setjmp" at SETJMP_CALL where the final stack depth should be
808f4dfe
DM
1601 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1602 done, and should be done by the caller. */
757bf1df
DM
1603
1604void
1605region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
808f4dfe 1606 int setjmp_stack_depth, region_model_context *ctxt)
757bf1df
DM
1607{
1608 /* Evaluate the val, using the frame of the "longjmp". */
1609 tree fake_retval = gimple_call_arg (longjmp_call, 1);
808f4dfe 1610 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
757bf1df
DM
1611
1612 /* Pop any frames until we reach the stack depth of the function where
1613 setjmp was called. */
1614 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1615 while (get_stack_depth () > setjmp_stack_depth)
808f4dfe 1616 pop_frame (NULL, NULL, ctxt);
757bf1df
DM
1617
1618 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1619
1620 /* Assign to LHS of "setjmp" in new_state. */
1621 if (tree lhs = gimple_call_lhs (setjmp_call))
1622 {
1623 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1aff29d4
DM
1624 const svalue *zero_sval
1625 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
808f4dfe 1626 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
757bf1df
DM
1627 /* If we have 0, use 1. */
1628 if (eq_zero.is_true ())
1629 {
808f4dfe 1630 const svalue *one_sval
1aff29d4 1631 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
808f4dfe 1632 fake_retval_sval = one_sval;
757bf1df
DM
1633 }
1634 else
1635 {
1636 /* Otherwise note that the value is nonzero. */
808f4dfe 1637 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
757bf1df
DM
1638 }
1639
808f4dfe
DM
1640 /* Decorate the return value from setjmp as being unmergeable,
1641 so that we don't attempt to merge states with it as zero
1642 with states in which it's nonzero, leading to a clean distinction
1643 in the exploded_graph betweeen the first return and the second
1644 return. */
1645 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
757bf1df 1646
808f4dfe
DM
1647 const region *lhs_reg = get_lvalue (lhs, ctxt);
1648 set_value (lhs_reg, fake_retval_sval, ctxt);
1649 }
757bf1df
DM
1650}
1651
1652/* Update this region_model for a phi stmt of the form
1653 LHS = PHI <...RHS...>.
e0a7a675
DM
1654 where RHS is for the appropriate edge.
1655 Get state from OLD_STATE so that all of the phi stmts for a basic block
1656 are effectively handled simultaneously. */
757bf1df
DM
1657
1658void
8525d1f5 1659region_model::handle_phi (const gphi *phi,
808f4dfe 1660 tree lhs, tree rhs,
e0a7a675 1661 const region_model &old_state,
757bf1df
DM
1662 region_model_context *ctxt)
1663{
1664 /* For now, don't bother tracking the .MEM SSA names. */
1665 if (tree var = SSA_NAME_VAR (lhs))
1666 if (TREE_CODE (var) == VAR_DECL)
1667 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
1668 return;
1669
e0a7a675
DM
1670 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1671 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
757bf1df 1672
e0a7a675 1673 set_value (dst_reg, src_sval, ctxt);
8525d1f5
DM
1674
1675 if (ctxt)
1676 ctxt->on_phi (phi, rhs);
757bf1df
DM
1677}
1678
1679/* Implementation of region_model::get_lvalue; the latter adds type-checking.
1680
1681 Get the id of the region for PV within this region_model,
1682 emitting any diagnostics to CTXT. */
1683
808f4dfe 1684const region *
53cb324c 1685region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1686{
1687 tree expr = pv.m_tree;
1688
1689 gcc_assert (expr);
1690
1691 switch (TREE_CODE (expr))
1692 {
1693 default:
808f4dfe
DM
1694 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
1695 dump_location_t ());
757bf1df
DM
1696
1697 case ARRAY_REF:
1698 {
1699 tree array = TREE_OPERAND (expr, 0);
1700 tree index = TREE_OPERAND (expr, 1);
757bf1df 1701
808f4dfe
DM
1702 const region *array_reg = get_lvalue (array, ctxt);
1703 const svalue *index_sval = get_rvalue (index, ctxt);
1704 return m_mgr->get_element_region (array_reg,
1705 TREE_TYPE (TREE_TYPE (array)),
1706 index_sval);
757bf1df
DM
1707 }
1708 break;
1709
93e759fc
DM
1710 case BIT_FIELD_REF:
1711 {
1712 tree inner_expr = TREE_OPERAND (expr, 0);
1713 const region *inner_reg = get_lvalue (inner_expr, ctxt);
1714 tree num_bits = TREE_OPERAND (expr, 1);
1715 tree first_bit_offset = TREE_OPERAND (expr, 2);
1716 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
1717 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
1718 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
1719 TREE_INT_CST_LOW (num_bits));
1720 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
1721 }
1722 break;
1723
757bf1df
DM
1724 case MEM_REF:
1725 {
1726 tree ptr = TREE_OPERAND (expr, 0);
1727 tree offset = TREE_OPERAND (expr, 1);
808f4dfe
DM
1728 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1729 const svalue *offset_sval = get_rvalue (offset, ctxt);
1730 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
1731 return m_mgr->get_offset_region (star_ptr,
1732 TREE_TYPE (expr),
1733 offset_sval);
757bf1df
DM
1734 }
1735 break;
1736
808f4dfe
DM
1737 case FUNCTION_DECL:
1738 return m_mgr->get_region_for_fndecl (expr);
1739
1740 case LABEL_DECL:
1741 return m_mgr->get_region_for_label (expr);
1742
757bf1df
DM
1743 case VAR_DECL:
1744 /* Handle globals. */
1745 if (is_global_var (expr))
808f4dfe 1746 return m_mgr->get_region_for_global (expr);
757bf1df
DM
1747
1748 /* Fall through. */
1749
1750 case SSA_NAME:
1751 case PARM_DECL:
1752 case RESULT_DECL:
1753 {
1754 gcc_assert (TREE_CODE (expr) == SSA_NAME
1755 || TREE_CODE (expr) == PARM_DECL
1756 || TREE_CODE (expr) == VAR_DECL
1757 || TREE_CODE (expr) == RESULT_DECL);
1758
808f4dfe
DM
1759 int stack_index = pv.m_stack_depth;
1760 const frame_region *frame = get_frame_at_index (stack_index);
757bf1df 1761 gcc_assert (frame);
808f4dfe 1762 return frame->get_region_for_local (m_mgr, expr);
757bf1df
DM
1763 }
1764
1765 case COMPONENT_REF:
1766 {
1767 /* obj.field */
1768 tree obj = TREE_OPERAND (expr, 0);
1769 tree field = TREE_OPERAND (expr, 1);
808f4dfe
DM
1770 const region *obj_reg = get_lvalue (obj, ctxt);
1771 return m_mgr->get_field_region (obj_reg, field);
41a9e940
DM
1772 }
1773 break;
1774
757bf1df 1775 case STRING_CST:
808f4dfe 1776 return m_mgr->get_region_for_string (expr);
757bf1df
DM
1777 }
1778}
1779
1780/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
1781
09bea584
DM
1782static void
1783assert_compat_types (tree src_type, tree dst_type)
1784{
1785 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
1786 {
1787#if CHECKING_P
1788 if (!(useless_type_conversion_p (src_type, dst_type)))
1789 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
1790#endif
1791 }
09bea584 1792}
757bf1df 1793
ea4e3218
DM
1794/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
1795
e66b9f67 1796bool
ea4e3218
DM
1797compat_types_p (tree src_type, tree dst_type)
1798{
1799 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
1800 if (!(useless_type_conversion_p (src_type, dst_type)))
1801 return false;
1802 return true;
1803}
1804
808f4dfe 1805/* Get the region for PV within this region_model,
757bf1df
DM
1806 emitting any diagnostics to CTXT. */
1807
808f4dfe 1808const region *
53cb324c 1809region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1810{
1811 if (pv.m_tree == NULL_TREE)
808f4dfe 1812 return NULL;
757bf1df 1813
808f4dfe
DM
1814 const region *result_reg = get_lvalue_1 (pv, ctxt);
1815 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
1816 return result_reg;
757bf1df
DM
1817}
1818
808f4dfe 1819/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
1820 recent stack frame if it's a local). */
1821
808f4dfe 1822const region *
53cb324c 1823region_model::get_lvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
1824{
1825 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1826}
1827
1828/* Implementation of region_model::get_rvalue; the latter adds type-checking.
1829
1830 Get the value of PV within this region_model,
1831 emitting any diagnostics to CTXT. */
1832
808f4dfe 1833const svalue *
53cb324c 1834region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1835{
1836 gcc_assert (pv.m_tree);
1837
1838 switch (TREE_CODE (pv.m_tree))
1839 {
1840 default:
2242b975 1841 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
757bf1df
DM
1842
1843 case ADDR_EXPR:
1844 {
1845 /* "&EXPR". */
1846 tree expr = pv.m_tree;
1847 tree op0 = TREE_OPERAND (expr, 0);
808f4dfe
DM
1848 const region *expr_reg = get_lvalue (op0, ctxt);
1849 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
757bf1df
DM
1850 }
1851 break;
1852
808f4dfe 1853 case BIT_FIELD_REF:
d3b1ef7a
DM
1854 {
1855 tree expr = pv.m_tree;
1856 tree op0 = TREE_OPERAND (expr, 0);
1857 const region *reg = get_lvalue (op0, ctxt);
1858 tree num_bits = TREE_OPERAND (expr, 1);
1859 tree first_bit_offset = TREE_OPERAND (expr, 2);
1860 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
1861 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
1862 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
1863 TREE_INT_CST_LOW (num_bits));
9faf8348 1864 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
d3b1ef7a 1865 }
808f4dfe
DM
1866
1867 case SSA_NAME:
1868 case VAR_DECL:
1869 case PARM_DECL:
1870 case RESULT_DECL:
757bf1df
DM
1871 case ARRAY_REF:
1872 {
da7c2773 1873 const region *reg = get_lvalue (pv, ctxt);
9faf8348 1874 return get_store_value (reg, ctxt);
757bf1df
DM
1875 }
1876
808f4dfe
DM
1877 case REALPART_EXPR:
1878 case IMAGPART_EXPR:
1879 case VIEW_CONVERT_EXPR:
1880 {
1881 tree expr = pv.m_tree;
1882 tree arg = TREE_OPERAND (expr, 0);
1883 const svalue *arg_sval = get_rvalue (arg, ctxt);
1884 const svalue *sval_unaryop
1885 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
1886 arg_sval);
1887 return sval_unaryop;
1888 };
1889
757bf1df
DM
1890 case INTEGER_CST:
1891 case REAL_CST:
808f4dfe
DM
1892 case COMPLEX_CST:
1893 case VECTOR_CST:
757bf1df 1894 case STRING_CST:
808f4dfe
DM
1895 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
1896
1897 case POINTER_PLUS_EXPR:
1898 {
1899 tree expr = pv.m_tree;
1900 tree ptr = TREE_OPERAND (expr, 0);
1901 tree offset = TREE_OPERAND (expr, 1);
1902 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1903 const svalue *offset_sval = get_rvalue (offset, ctxt);
1904 const svalue *sval_binop
1905 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
1906 ptr_sval, offset_sval);
1907 return sval_binop;
1908 }
1909
1910 /* Binary ops. */
1911 case PLUS_EXPR:
1912 case MULT_EXPR:
1913 {
1914 tree expr = pv.m_tree;
1915 tree arg0 = TREE_OPERAND (expr, 0);
1916 tree arg1 = TREE_OPERAND (expr, 1);
1917 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
1918 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
1919 const svalue *sval_binop
1920 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
1921 arg0_sval, arg1_sval);
1922 return sval_binop;
1923 }
757bf1df
DM
1924
1925 case COMPONENT_REF:
1926 case MEM_REF:
757bf1df 1927 {
808f4dfe 1928 const region *ref_reg = get_lvalue (pv, ctxt);
9faf8348 1929 return get_store_value (ref_reg, ctxt);
757bf1df 1930 }
1b342485
AS
1931 case OBJ_TYPE_REF:
1932 {
1933 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
1934 return get_rvalue (expr, ctxt);
1935 }
757bf1df
DM
1936 }
1937}
1938
1939/* Get the value of PV within this region_model,
1940 emitting any diagnostics to CTXT. */
1941
808f4dfe 1942const svalue *
53cb324c 1943region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1944{
1945 if (pv.m_tree == NULL_TREE)
808f4dfe 1946 return NULL;
757bf1df 1947
808f4dfe 1948 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 1949
808f4dfe
DM
1950 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
1951
33255ad3
DM
1952 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
1953
808f4dfe 1954 return result_sval;
757bf1df
DM
1955}
1956
1957/* Get the value of EXPR within this region_model (assuming the most
1958 recent stack frame if it's a local). */
1959
808f4dfe 1960const svalue *
53cb324c 1961region_model::get_rvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
1962{
1963 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1964}
1965
623bc027
DM
1966/* Return true if this model is on a path with "main" as the entrypoint
1967 (as opposed to one in which we're merely analyzing a subset of the
1968 path through the code). */
1969
1970bool
1971region_model::called_from_main_p () const
1972{
1973 if (!m_current_frame)
1974 return false;
1975 /* Determine if the oldest stack frame in this model is for "main". */
1976 const frame_region *frame0 = get_frame_at_index (0);
1977 gcc_assert (frame0);
1978 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
1979}
1980
/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    {
      /* Attempt to get the initializer value for base_reg.  */
      if (const svalue *base_reg_init
	    = base_reg->get_svalue_for_initializer (m_mgr))
	{
	  if (reg == base_reg)
	    return base_reg_init;
	  else
	    {
	      /* REG is strictly within BASE_REG; extract the value for
		 REG from within BASE_REG_INIT, by building a temporary
		 binding_cluster holding the initializer.  */
	      binding_cluster c (base_reg);
	      c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
	      const svalue *sval
		= c.get_any_binding (m_mgr->get_store_manager (), reg);
	      if (sval)
		{
		  if (reg->get_type ())
		    sval = m_mgr->get_or_create_cast (reg->get_type (),
						      sval);
		  return sval;
		}
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
2037
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.
   The special cases below are tried in order; the first that yields a
   value wins.  */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* May emit diagnostics via CTXT (e.g. taint checking).  */
  check_region_for_read (reg, ctxt);

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  /* Try the store itself.  */
  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg);
}
2101
808f4dfe
DM
2102/* Return false if REG does not exist, true if it may do.
2103 This is for detecting regions within the stack that don't exist anymore
2104 after frames are popped. */
757bf1df 2105
808f4dfe
DM
2106bool
2107region_model::region_exists_p (const region *reg) const
757bf1df 2108{
808f4dfe
DM
2109 /* If within a stack frame, check that the stack frame is live. */
2110 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 2111 {
808f4dfe
DM
2112 /* Check that the current frame is the enclosing frame, or is called
2113 by it. */
2114 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2115 iter_frame = iter_frame->get_calling_frame ())
2116 if (iter_frame == enclosing_frame)
2117 return true;
2118 return false;
757bf1df 2119 }
808f4dfe
DM
2120
2121 return true;
757bf1df
DM
2122}
2123
/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.
   Note the side effect: dereferencing adds a "PTR_SVAL != NULL"
   constraint to the model.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
  const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
  m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	/* "&REG": dereferencing yields REG itself.  */
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	/* Dereferencing an uninitialized/freed pointer: warn, then
	   fall through to the symbolic-region fallback below.  */
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (new poisoned_value_diagnostic (ptr, pkind, NULL));
	      }
	  }
      }
      break;
    }

  /* Fallback: a symbolic region based on PTR_SVAL.  */
  return m_mgr->get_symbolic_region (ptr_sval);
}
2203
d3b1ef7a
DM
2204/* Attempt to get BITS within any value of REG, as TYPE.
2205 In particular, extract values from compound_svalues for the case
2206 where there's a concrete binding at BITS.
9faf8348
DM
2207 Return an unknown svalue if we can't handle the given case.
2208 Use CTXT to report any warnings associated with reading from REG. */
d3b1ef7a
DM
2209
2210const svalue *
2211region_model::get_rvalue_for_bits (tree type,
2212 const region *reg,
9faf8348
DM
2213 const bit_range &bits,
2214 region_model_context *ctxt) const
d3b1ef7a 2215{
9faf8348 2216 const svalue *sval = get_store_value (reg, ctxt);
e61ffa20 2217 return m_mgr->get_or_create_bits_within (type, bits, sval);
d3b1ef7a
DM
2218}
2219
/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory.  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    auto_diagnostic_group d;
    /* Tailor the wording to the kind of region being written to.  */
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
	warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
			     "write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
			     "write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
			     "write to label %qE", m_decl);
	break;
      }
    /* Only add the "declared here" note if the warning fired.  */
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    /* Wording mirrors that of emit above.  */
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;	/* The region that was written to.  */
  tree m_decl;		/* The decl being complained about.  */
};
2283
/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, OPT_Wanalyzer_write_to_string_literal,
		       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;	/* The string-literal region written to.  */
};
2321
/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to
   (a function, a label, a read-only global, or a string literal).  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Classify based on the base region of the write destination.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (new write_to_const_diagnostic (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
      break;
    }
}
2369
/* Get the capacity of REG in bytes.
   Falls back to any recorded dynamic extents, and ultimately to an
   unknown svalue of sizetype.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    /* For an SSA name, use the size of its type.  */
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    /* For other decls, use the size of the initialized object,
	       if known.  */
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    }

  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
2408
/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.  */

void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Taint checking applies to both reads and writes.  */
  check_region_for_taint (reg, dir, ctxt);

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
}

/* If CTXT is non-NULL, use it to warn about any problems writing to REG.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}

/* If CTXT is non-NULL, use it to warn about any problems reading from REG.  */

void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  check_region_access (src_reg, DIR_READ, ctxt);
}
2453
/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* May emit diagnostics (e.g. write-to-const) via CTXT.  */
  check_region_for_write (lhs_reg, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : NULL);
}
2470
808f4dfe 2471/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
2472
2473void
808f4dfe 2474region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 2475{
808f4dfe
DM
2476 const region *lhs_reg = get_lvalue (lhs, ctxt);
2477 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
2478 gcc_assert (lhs_reg);
2479 gcc_assert (rhs_sval);
2480 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
2481}
2482
/* Remove all bindings overlapping REG within the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}

/* Remove any bindings for REG within the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}

/* Fill REG with SVAL.  */

void
region_model::fill_region (const region *reg, const svalue *sval)
{
  m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
}

/* Zero-fill REG.  */

void
region_model::zero_fill_region (const region *reg)
{
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}

/* Mark REG as having unknown content.
   UNCERTAINTY, if non-NULL, is notified about svalues that may be
   lost by this operation.  */

void
region_model::mark_region_as_unknown (const region *reg,
				      uncertainty_t *uncertainty)
{
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
				  uncertainty);
}
2524
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.
   First tries value-based reasoning (eval_condition_without_cm), then
   falls back to the constraint_manager.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  tristate ts = eval_condition_without_cm (lhs, op, rhs);
  if (ts.is_known ())
    return ts;

  /* Otherwise, try constraints.  */
  return m_constraints->eval_condition (lhs, op, rhs);
}
2546
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model, without resorting to the constraint_manager.

   This is exposed so that impl_region_model_context::on_state_leak can
   check for equality part-way through region_model::purge_unused_svalues
   without risking creating new ECs.  */

tristate
region_model::eval_condition_without_cm (const svalue *lhs,
					 enum tree_code op,
					 const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* See what we know based on the values.  */

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  /* If we have a pair of constants, compare them.  */
  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practise and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts
		  = eval_condition_without_cm (binop->get_arg0 (),
					       op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* Widening svalues (from loop analysis) may know their relation to a
     constant.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  return tristate::TS_UNKNOWN;
}
2690
2691/* Subroutine of region_model::eval_condition_without_cm, for rejecting
2692 equality of INIT_VAL(PARM) with &LOCAL. */
2693
2694tristate
2695region_model::compare_initial_and_pointer (const initial_svalue *init,
2696 const region_svalue *ptr) const
2697{
2698 const region *pointee = ptr->get_pointee ();
2699
2700 /* If we have a pointer to something within a stack frame, it can't be the
2701 initial value of a param. */
2702 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
2703 if (init->initial_value_of_param_p ())
2704 return tristate::TS_FALSE;
757bf1df
DM
2705
2706 return tristate::TS_UNKNOWN;
2707}
2708
/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNEROP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradicts existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  /* Strip away any casts wrapping the binop.  */
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}
2825
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  /* Convert the operands to svalues and delegate to the svalue
     overload.  */
  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  return add_constraint (lhs_sval, op, rhs_sval, ctxt);
}
2845
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  /* Try decomposing boolean binops (e.g. "(A != B) == 0") into
     constraints on their operands.  */
  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
2892
84fb3546
DM
2893/* As above, but when returning false, if OUT is non-NULL, write a
2894 new rejected_constraint to *OUT. */
2895
2896bool
2897region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
2898 region_model_context *ctxt,
2899 rejected_constraint **out)
2900{
2901 bool sat = add_constraint (lhs, op, rhs, ctxt);
2902 if (!sat && out)
8ca7fa84 2903 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
2904 return sat;
2905}
2906
757bf1df
DM
2907/* Determine what is known about the condition "LHS OP RHS" within
2908 this model.
2909 Use CTXT for reporting any diagnostics associated with the accesses. */
2910
2911tristate
2912region_model::eval_condition (tree lhs,
2913 enum tree_code op,
2914 tree rhs,
2915 region_model_context *ctxt)
2916{
e978955d
DM
2917 /* For now, make no attempt to model constraints on floating-point
2918 values. */
2919 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
2920 return tristate::unknown ();
2921
757bf1df
DM
2922 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
2923}
2924
467a4820
DM
2925/* Implementation of region_model::get_representative_path_var.
2926 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
2927 Use VISITED to prevent infinite mutual recursion with the overload for
2928 regions. */
757bf1df 2929
808f4dfe 2930path_var
467a4820
DM
2931region_model::get_representative_path_var_1 (const svalue *sval,
2932 svalue_set *visited) const
757bf1df 2933{
467a4820 2934 gcc_assert (sval);
757bf1df 2935
808f4dfe
DM
2936 /* Prevent infinite recursion. */
2937 if (visited->contains (sval))
2938 return path_var (NULL_TREE, 0);
2939 visited->add (sval);
757bf1df 2940
467a4820
DM
2941 /* Handle casts by recursion into get_representative_path_var. */
2942 if (const svalue *cast_sval = sval->maybe_undo_cast ())
2943 {
2944 path_var result = get_representative_path_var (cast_sval, visited);
2945 tree orig_type = sval->get_type ();
2946 /* If necessary, wrap the result in a cast. */
2947 if (result.m_tree && orig_type)
2948 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
2949 return result;
2950 }
2951
808f4dfe
DM
2952 auto_vec<path_var> pvs;
2953 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 2954
808f4dfe
DM
2955 if (tree cst = sval->maybe_get_constant ())
2956 pvs.safe_push (path_var (cst, 0));
757bf1df 2957
90f7c300 2958 /* Handle string literals and various other pointers. */
808f4dfe
DM
2959 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
2960 {
2961 const region *reg = ptr_sval->get_pointee ();
2962 if (path_var pv = get_representative_path_var (reg, visited))
2963 return path_var (build1 (ADDR_EXPR,
467a4820 2964 sval->get_type (),
808f4dfe
DM
2965 pv.m_tree),
2966 pv.m_stack_depth);
2967 }
2968
2969 /* If we have a sub_svalue, look for ways to represent the parent. */
2970 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 2971 {
808f4dfe
DM
2972 const svalue *parent_sval = sub_sval->get_parent ();
2973 const region *subreg = sub_sval->get_subregion ();
2974 if (path_var parent_pv
2975 = get_representative_path_var (parent_sval, visited))
2976 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
2977 return path_var (build3 (COMPONENT_REF,
2978 sval->get_type (),
2979 parent_pv.m_tree,
2980 field_reg->get_field (),
2981 NULL_TREE),
2982 parent_pv.m_stack_depth);
90f7c300
DM
2983 }
2984
b9365b93
DM
2985 /* Handle binops. */
2986 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
2987 if (path_var lhs_pv
2988 = get_representative_path_var (binop_sval->get_arg0 (), visited))
2989 if (path_var rhs_pv
2990 = get_representative_path_var (binop_sval->get_arg1 (), visited))
2991 return path_var (build2 (binop_sval->get_op (),
2992 sval->get_type (),
2993 lhs_pv.m_tree, rhs_pv.m_tree),
2994 lhs_pv.m_stack_depth);
2995
808f4dfe
DM
2996 if (pvs.length () < 1)
2997 return path_var (NULL_TREE, 0);
2998
2999 pvs.qsort (readability_comparator);
3000 return pvs[0];
757bf1df
DM
3001}
3002
467a4820
DM
3003/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3004 Use VISITED to prevent infinite mutual recursion with the overload for
3005 regions
3006
3007 This function defers to get_representative_path_var_1 to do the work;
3008 it adds verification that get_representative_path_var_1 returned a tree
3009 of the correct type. */
3010
3011path_var
3012region_model::get_representative_path_var (const svalue *sval,
3013 svalue_set *visited) const
3014{
3015 if (sval == NULL)
3016 return path_var (NULL_TREE, 0);
3017
3018 tree orig_type = sval->get_type ();
3019
3020 path_var result = get_representative_path_var_1 (sval, visited);
3021
3022 /* Verify that the result has the same type as SVAL, if any. */
3023 if (result.m_tree && orig_type)
3024 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3025
3026 return result;
3027}
3028
3029/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3030
3031 Strip off any top-level cast, to avoid messages like
3032 double-free of '(void *)ptr'
3033 from analyzer diagnostics. */
757bf1df 3034
808f4dfe
DM
3035tree
3036region_model::get_representative_tree (const svalue *sval) const
757bf1df 3037{
808f4dfe 3038 svalue_set visited;
467a4820
DM
3039 tree expr = get_representative_path_var (sval, &visited).m_tree;
3040
3041 /* Strip off any top-level cast. */
3042 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 3043 expr = TREE_OPERAND (expr, 0);
467a4820 3044
e4bb1bd6 3045 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
3046}
3047
467a4820
DM
3048/* Implementation of region_model::get_representative_path_var.
3049
3050 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
3051 the NULL path_var.
3052 For example, a region for a field of a local would be a path_var
3053 wrapping a COMPONENT_REF.
3054 Use VISITED to prevent infinite mutual recursion with the overload for
3055 svalues. */
757bf1df 3056
808f4dfe 3057path_var
467a4820
DM
3058region_model::get_representative_path_var_1 (const region *reg,
3059 svalue_set *visited) const
808f4dfe
DM
3060{
3061 switch (reg->get_kind ())
757bf1df 3062 {
808f4dfe
DM
3063 default:
3064 gcc_unreachable ();
e516294a 3065
808f4dfe
DM
3066 case RK_FRAME:
3067 case RK_GLOBALS:
3068 case RK_CODE:
3069 case RK_HEAP:
3070 case RK_STACK:
3071 case RK_ROOT:
3072 /* Regions that represent memory spaces are not expressible as trees. */
3073 return path_var (NULL_TREE, 0);
757bf1df 3074
808f4dfe 3075 case RK_FUNCTION:
884d9141 3076 {
808f4dfe
DM
3077 const function_region *function_reg
3078 = as_a <const function_region *> (reg);
3079 return path_var (function_reg->get_fndecl (), 0);
884d9141 3080 }
808f4dfe 3081 case RK_LABEL:
9e78634c
DM
3082 {
3083 const label_region *label_reg = as_a <const label_region *> (reg);
3084 return path_var (label_reg->get_label (), 0);
3085 }
90f7c300 3086
808f4dfe
DM
3087 case RK_SYMBOLIC:
3088 {
3089 const symbolic_region *symbolic_reg
3090 = as_a <const symbolic_region *> (reg);
3091 const svalue *pointer = symbolic_reg->get_pointer ();
3092 path_var pointer_pv = get_representative_path_var (pointer, visited);
3093 if (!pointer_pv)
3094 return path_var (NULL_TREE, 0);
3095 tree offset = build_int_cst (pointer->get_type (), 0);
3096 return path_var (build2 (MEM_REF,
3097 reg->get_type (),
3098 pointer_pv.m_tree,
3099 offset),
3100 pointer_pv.m_stack_depth);
3101 }
3102 case RK_DECL:
3103 {
3104 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3105 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3106 }
3107 case RK_FIELD:
3108 {
3109 const field_region *field_reg = as_a <const field_region *> (reg);
3110 path_var parent_pv
3111 = get_representative_path_var (reg->get_parent_region (), visited);
3112 if (!parent_pv)
3113 return path_var (NULL_TREE, 0);
3114 return path_var (build3 (COMPONENT_REF,
3115 reg->get_type (),
3116 parent_pv.m_tree,
3117 field_reg->get_field (),
3118 NULL_TREE),
3119 parent_pv.m_stack_depth);
3120 }
757bf1df 3121
808f4dfe
DM
3122 case RK_ELEMENT:
3123 {
3124 const element_region *element_reg
3125 = as_a <const element_region *> (reg);
3126 path_var parent_pv
3127 = get_representative_path_var (reg->get_parent_region (), visited);
3128 if (!parent_pv)
3129 return path_var (NULL_TREE, 0);
3130 path_var index_pv
3131 = get_representative_path_var (element_reg->get_index (), visited);
3132 if (!index_pv)
3133 return path_var (NULL_TREE, 0);
3134 return path_var (build4 (ARRAY_REF,
3135 reg->get_type (),
3136 parent_pv.m_tree, index_pv.m_tree,
3137 NULL_TREE, NULL_TREE),
3138 parent_pv.m_stack_depth);
3139 }
757bf1df 3140
808f4dfe 3141 case RK_OFFSET:
757bf1df 3142 {
808f4dfe
DM
3143 const offset_region *offset_reg
3144 = as_a <const offset_region *> (reg);
3145 path_var parent_pv
3146 = get_representative_path_var (reg->get_parent_region (), visited);
3147 if (!parent_pv)
3148 return path_var (NULL_TREE, 0);
3149 path_var offset_pv
3150 = get_representative_path_var (offset_reg->get_byte_offset (),
3151 visited);
29f5db8e 3152 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 3153 return path_var (NULL_TREE, 0);
29f5db8e
DM
3154 tree addr_parent = build1 (ADDR_EXPR,
3155 build_pointer_type (reg->get_type ()),
3156 parent_pv.m_tree);
808f4dfe
DM
3157 return path_var (build2 (MEM_REF,
3158 reg->get_type (),
29f5db8e 3159 addr_parent, offset_pv.m_tree),
808f4dfe 3160 parent_pv.m_stack_depth);
757bf1df 3161 }
757bf1df 3162
e61ffa20
DM
3163 case RK_SIZED:
3164 return path_var (NULL_TREE, 0);
3165
808f4dfe
DM
3166 case RK_CAST:
3167 {
3168 path_var parent_pv
3169 = get_representative_path_var (reg->get_parent_region (), visited);
3170 if (!parent_pv)
3171 return path_var (NULL_TREE, 0);
3172 return path_var (build1 (NOP_EXPR,
3173 reg->get_type (),
3174 parent_pv.m_tree),
3175 parent_pv.m_stack_depth);
3176 }
757bf1df 3177
808f4dfe
DM
3178 case RK_HEAP_ALLOCATED:
3179 case RK_ALLOCA:
3180 /* No good way to express heap-allocated/alloca regions as trees. */
3181 return path_var (NULL_TREE, 0);
757bf1df 3182
808f4dfe
DM
3183 case RK_STRING:
3184 {
3185 const string_region *string_reg = as_a <const string_region *> (reg);
3186 return path_var (string_reg->get_string_cst (), 0);
3187 }
757bf1df 3188
808f4dfe
DM
3189 case RK_UNKNOWN:
3190 return path_var (NULL_TREE, 0);
3191 }
757bf1df
DM
3192}
3193
467a4820
DM
3194/* Attempt to return a path_var that represents REG, or return
3195 the NULL path_var.
3196 For example, a region for a field of a local would be a path_var
3197 wrapping a COMPONENT_REF.
3198 Use VISITED to prevent infinite mutual recursion with the overload for
3199 svalues.
3200
3201 This function defers to get_representative_path_var_1 to do the work;
3202 it adds verification that get_representative_path_var_1 returned a tree
3203 of the correct type. */
3204
3205path_var
3206region_model::get_representative_path_var (const region *reg,
3207 svalue_set *visited) const
3208{
3209 path_var result = get_representative_path_var_1 (reg, visited);
3210
3211 /* Verify that the result has the same type as REG, if any. */
3212 if (result.m_tree && reg->get_type ())
3213 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
3214
3215 return result;
3216}
3217
757bf1df
DM
3218/* Update this model for any phis in SNODE, assuming we came from
3219 LAST_CFG_SUPEREDGE. */
3220
3221void
3222region_model::update_for_phis (const supernode *snode,
3223 const cfg_superedge *last_cfg_superedge,
3224 region_model_context *ctxt)
3225{
3226 gcc_assert (last_cfg_superedge);
3227
e0a7a675
DM
3228 /* Copy this state and pass it to handle_phi so that all of the phi stmts
3229 are effectively handled simultaneously. */
3230 const region_model old_state (*this);
3231
757bf1df
DM
3232 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
3233 !gsi_end_p (gpi); gsi_next (&gpi))
3234 {
3235 gphi *phi = gpi.phi ();
3236
3237 tree src = last_cfg_superedge->get_phi_arg (phi);
3238 tree lhs = gimple_phi_result (phi);
3239
e0a7a675
DM
3240 /* Update next_state based on phi and old_state. */
3241 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
3242 }
3243}
3244
3245/* Attempt to update this model for taking EDGE (where the last statement
3246 was LAST_STMT), returning true if the edge can be taken, false
3247 otherwise.
84fb3546
DM
3248 When returning false, if OUT is non-NULL, write a new rejected_constraint
3249 to it.
757bf1df
DM
3250
3251 For CFG superedges where LAST_STMT is a conditional or a switch
3252 statement, attempt to add the relevant conditions for EDGE to this
3253 model, returning true if they are feasible, or false if they are
3254 impossible.
3255
3256 For call superedges, push frame information and store arguments
3257 into parameters.
3258
3259 For return superedges, pop frame information and store return
3260 values into any lhs.
3261
3262 Rejection of call/return superedges happens elsewhere, in
3263 program_point::on_edge (i.e. based on program point, rather
3264 than program state). */
3265
3266bool
3267region_model::maybe_update_for_edge (const superedge &edge,
3268 const gimple *last_stmt,
84fb3546
DM
3269 region_model_context *ctxt,
3270 rejected_constraint **out)
757bf1df
DM
3271{
3272 /* Handle frame updates for interprocedural edges. */
3273 switch (edge.m_kind)
3274 {
3275 default:
3276 break;
3277
3278 case SUPEREDGE_CALL:
3279 {
3280 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
3281 update_for_call_superedge (*call_edge, ctxt);
3282 }
3283 break;
3284
3285 case SUPEREDGE_RETURN:
3286 {
3287 const return_superedge *return_edge
3288 = as_a <const return_superedge *> (&edge);
3289 update_for_return_superedge (*return_edge, ctxt);
3290 }
3291 break;
3292
3293 case SUPEREDGE_INTRAPROCEDURAL_CALL:
3294 {
3295 const callgraph_superedge *cg_sedge
3296 = as_a <const callgraph_superedge *> (&edge);
3297 update_for_call_summary (*cg_sedge, ctxt);
3298 }
3299 break;
3300 }
3301
3302 if (last_stmt == NULL)
3303 return true;
3304
3305 /* Apply any constraints for conditionals/switch statements. */
3306
3307 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
3308 {
3309 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 3310 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
3311 }
3312
3313 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
3314 {
3315 const switch_cfg_superedge *switch_sedge
3316 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
3317 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
3318 ctxt, out);
757bf1df
DM
3319 }
3320
1690a839
DM
3321 /* Apply any constraints due to an exception being thrown. */
3322 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
3323 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 3324 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 3325
757bf1df
DM
3326 return true;
3327}
3328
3329/* Push a new frame_region on to the stack region.
3330 Populate the frame_region with child regions for the function call's
3331 parameters, using values from the arguments at the callsite in the
3332 caller's frame. */
3333
3334void
aef703cf 3335region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
3336 region_model_context *ctxt,
3337 function *callee)
757bf1df 3338{
808f4dfe 3339 /* Build a vec of argument svalues, using the current top
757bf1df 3340 frame for resolving tree expressions. */
808f4dfe 3341 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
3342
3343 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
3344 {
3345 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 3346 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
3347 }
3348
e92d0ff6
AS
3349 if(!callee)
3350 {
3351 /* Get the function * from the gcall. */
3352 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
3353 callee = DECL_STRUCT_FUNCTION (fn_decl);
3354 }
3355
3356 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
3357}
3358
a96f1c38
DM
3359/* Pop the top-most frame_region from the stack, and copy the return
3360 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 3361 the call (if any). */
aef703cf 3362
757bf1df 3363void
aef703cf
AS
3364region_model::update_for_return_gcall (const gcall *call_stmt,
3365 region_model_context *ctxt)
757bf1df 3366{
a96f1c38 3367 /* Get the region for the result of the call, within the caller frame. */
808f4dfe 3368 const region *result_dst_reg = NULL;
757bf1df
DM
3369 tree lhs = gimple_call_lhs (call_stmt);
3370 if (lhs)
a96f1c38
DM
3371 {
3372 /* Normally we access the top-level frame, which is:
aef703cf
AS
3373 path_var (expr, get_stack_depth () - 1)
3374 whereas here we need the caller frame, hence "- 2" here. */
808f4dfe
DM
3375 gcc_assert (get_stack_depth () >= 2);
3376 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
aef703cf 3377 ctxt);
a96f1c38
DM
3378 }
3379
808f4dfe 3380 pop_frame (result_dst_reg, NULL, ctxt);
757bf1df
DM
3381}
3382
aef703cf
AS
3383/* Extract calling information from the superedge and update the model for the
3384 call */
3385
3386void
3387region_model::update_for_call_superedge (const call_superedge &call_edge,
3388 region_model_context *ctxt)
3389{
3390 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 3391 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
3392}
3393
3394/* Extract calling information from the return superedge and update the model
3395 for the returning call */
3396
3397void
3398region_model::update_for_return_superedge (const return_superedge &return_edge,
3399 region_model_context *ctxt)
3400{
3401 const gcall *call_stmt = return_edge.get_call_stmt ();
3402 update_for_return_gcall (call_stmt, ctxt);
3403}
3404
757bf1df
DM
3405/* Update this region_model with a summary of the effect of calling
3406 and returning from CG_SEDGE.
3407
3408 TODO: Currently this is extremely simplistic: we merely set the
3409 return value to "unknown". A proper implementation would e.g. update
3410 sm-state, and presumably be reworked to support multiple outcomes. */
3411
3412void
3413region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
3414 region_model_context *ctxt)
3415{
3416 /* For now, set any return value to "unknown". */
3417 const gcall *call_stmt = cg_sedge.get_call_stmt ();
3418 tree lhs = gimple_call_lhs (call_stmt);
3419 if (lhs)
3a66c289
DM
3420 mark_region_as_unknown (get_lvalue (lhs, ctxt),
3421 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
3422
3423 // TODO: actually implement some kind of summary here
3424}
3425
3426/* Given a true or false edge guarded by conditional statement COND_STMT,
3427 determine appropriate constraints for the edge to be taken.
3428
3429 If they are feasible, add the constraints and return true.
3430
3431 Return false if the constraints contradict existing knowledge
84fb3546
DM
3432 (and so the edge should not be taken).
3433 When returning false, if OUT is non-NULL, write a new rejected_constraint
3434 to it. */
757bf1df
DM
3435
3436bool
3437region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
3438 const gcond *cond_stmt,
84fb3546
DM
3439 region_model_context *ctxt,
3440 rejected_constraint **out)
757bf1df
DM
3441{
3442 ::edge cfg_edge = sedge.get_cfg_edge ();
3443 gcc_assert (cfg_edge != NULL);
3444 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
3445
3446 enum tree_code op = gimple_cond_code (cond_stmt);
3447 tree lhs = gimple_cond_lhs (cond_stmt);
3448 tree rhs = gimple_cond_rhs (cond_stmt);
3449 if (cfg_edge->flags & EDGE_FALSE_VALUE)
3450 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 3451 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
3452}
3453
3454/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
3455 for the edge to be taken.
3456
3457 If they are feasible, add the constraints and return true.
3458
3459 Return false if the constraints contradict existing knowledge
84fb3546
DM
3460 (and so the edge should not be taken).
3461 When returning false, if OUT is non-NULL, write a new rejected_constraint
3462 to it. */
757bf1df
DM
3463
3464bool
3465region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
3466 const gswitch *switch_stmt,
84fb3546
DM
3467 region_model_context *ctxt,
3468 rejected_constraint **out)
757bf1df 3469{
8ca7fa84
DM
3470 bounded_ranges_manager *ranges_mgr = get_range_manager ();
3471 const bounded_ranges *all_cases_ranges
3472 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
757bf1df 3473 tree index = gimple_switch_index (switch_stmt);
8ca7fa84
DM
3474 const svalue *index_sval = get_rvalue (index, ctxt);
3475 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
3476 if (!sat && out)
3477 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
3478 return sat;
757bf1df
DM
3479}
3480
1690a839
DM
3481/* Apply any constraints due to an exception being thrown at LAST_STMT.
3482
3483 If they are feasible, add the constraints and return true.
3484
3485 Return false if the constraints contradict existing knowledge
84fb3546
DM
3486 (and so the edge should not be taken).
3487 When returning false, if OUT is non-NULL, write a new rejected_constraint
3488 to it. */
1690a839
DM
3489
3490bool
3491region_model::apply_constraints_for_exception (const gimple *last_stmt,
84fb3546
DM
3492 region_model_context *ctxt,
3493 rejected_constraint **out)
1690a839
DM
3494{
3495 gcc_assert (last_stmt);
3496 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
3497 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
3498 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
3499 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
3500 {
3501 /* We have an exception thrown from operator new.
3502 Add a constraint that the result was NULL, to avoid a false
3503 leak report due to the result being lost when following
3504 the EH edge. */
3505 if (tree lhs = gimple_call_lhs (call))
84fb3546 3506 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
3507 return true;
3508 }
3509 return true;
3510}
3511
808f4dfe
DM
3512/* For use with push_frame when handling a top-level call within the analysis.
3513 PARAM has a defined but unknown initial value.
3514 Anything it points to has escaped, since the calling context "knows"
3515 the pointer, and thus calls to unknown functions could read/write into
3516 the region. */
757bf1df
DM
3517
3518void
808f4dfe 3519region_model::on_top_level_param (tree param,
3a25f345 3520 region_model_context *ctxt)
757bf1df 3521{
808f4dfe 3522 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 3523 {
808f4dfe
DM
3524 const region *param_reg = get_lvalue (param, ctxt);
3525 const svalue *init_ptr_sval
3526 = m_mgr->get_or_create_initial_value (param_reg);
3527 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
3528 m_store.mark_as_escaped (pointee_reg);
5eae0ac7 3529 }
757bf1df
DM
3530}
3531
808f4dfe
DM
3532/* Update this region_model to reflect pushing a frame onto the stack
3533 for a call to FUN.
757bf1df 3534
808f4dfe
DM
3535 If ARG_SVALS is non-NULL, use it to populate the parameters
3536 in the new frame.
3537 Otherwise, the params have their initial_svalues.
757bf1df 3538
808f4dfe 3539 Return the frame_region for the new frame. */
757bf1df 3540
808f4dfe
DM
3541const region *
3542region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
3543 region_model_context *ctxt)
757bf1df 3544{
808f4dfe
DM
3545 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
3546 if (arg_svals)
757bf1df 3547 {
808f4dfe
DM
3548 /* Arguments supplied from a caller frame. */
3549 tree fndecl = fun->decl;
3550 unsigned idx = 0;
3551 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3552 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 3553 {
808f4dfe
DM
3554 /* If there's a mismatching declaration, the call stmt might
3555 not have enough args. Handle this case by leaving the
3556 rest of the params as uninitialized. */
3557 if (idx >= arg_svals->length ())
3558 break;
294b6da2
DM
3559 tree parm_lval = iter_parm;
3560 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
3561 parm_lval = parm_default_ssa;
3562 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 3563 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 3564 set_value (parm_reg, arg_sval, ctxt);
757bf1df 3565 }
757bf1df 3566 }
808f4dfe 3567 else
757bf1df 3568 {
808f4dfe
DM
3569 /* Otherwise we have a top-level call within the analysis. The params
3570 have defined but unknown initial values.
3571 Anything they point to has escaped. */
3572 tree fndecl = fun->decl;
3573 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3574 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 3575 {
294b6da2 3576 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
808f4dfe 3577 on_top_level_param (parm_default_ssa, ctxt);
294b6da2
DM
3578 else
3579 on_top_level_param (iter_parm, ctxt);
757bf1df
DM
3580 }
3581 }
757bf1df 3582
808f4dfe 3583 return m_current_frame;
757bf1df
DM
3584}
3585
808f4dfe
DM
3586/* Get the function of the top-most frame in this region_model's stack.
3587 There must be such a frame. */
757bf1df 3588
808f4dfe
DM
3589function *
3590region_model::get_current_function () const
757bf1df 3591{
808f4dfe
DM
3592 const frame_region *frame = get_current_frame ();
3593 gcc_assert (frame);
3594 return frame->get_function ();
757bf1df
DM
3595}
3596
808f4dfe 3597/* Pop the topmost frame_region from this region_model's stack;
757bf1df 3598
808f4dfe
DM
3599 If RESULT_DST_REG is non-null, copy any return value from the frame
3600 into RESULT_DST_REG's region.
3601 If OUT_RESULT is non-null, copy any return value from the frame
3602 into *OUT_RESULT.
757bf1df 3603
808f4dfe
DM
3604 Purge the frame region and all its descendent regions.
3605 Convert any pointers that point into such regions into
3606 POISON_KIND_POPPED_STACK svalues. */
757bf1df 3607
808f4dfe
DM
3608void
3609region_model::pop_frame (const region *result_dst_reg,
3610 const svalue **out_result,
3611 region_model_context *ctxt)
3612{
3613 gcc_assert (m_current_frame);
757bf1df 3614
808f4dfe
DM
3615 /* Evaluate the result, within the callee frame. */
3616 const frame_region *frame_reg = m_current_frame;
3617 tree fndecl = m_current_frame->get_function ()->decl;
3618 tree result = DECL_RESULT (fndecl);
3619 if (result && TREE_TYPE (result) != void_type_node)
3620 {
3621 if (result_dst_reg)
3622 {
3623 /* Copy the result to RESULT_DST_REG. */
3624 copy_region (result_dst_reg,
3625 get_lvalue (result, ctxt),
3626 ctxt);
3627 }
3628 if (out_result)
3629 *out_result = get_rvalue (result, ctxt);
3630 }
757bf1df 3631
808f4dfe
DM
3632 /* Pop the frame. */
3633 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 3634
808f4dfe 3635 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
3636}
3637
808f4dfe 3638/* Get the number of frames in this region_model's stack. */
757bf1df 3639
808f4dfe
DM
3640int
3641region_model::get_stack_depth () const
757bf1df 3642{
808f4dfe
DM
3643 const frame_region *frame = get_current_frame ();
3644 if (frame)
3645 return frame->get_stack_depth ();
3646 else
3647 return 0;
757bf1df
DM
3648}
3649
808f4dfe
DM
3650/* Get the frame_region with the given index within the stack.
3651 The frame_region must exist. */
757bf1df 3652
808f4dfe
DM
3653const frame_region *
3654region_model::get_frame_at_index (int index) const
757bf1df 3655{
808f4dfe
DM
3656 const frame_region *frame = get_current_frame ();
3657 gcc_assert (frame);
3658 gcc_assert (index >= 0);
3659 gcc_assert (index <= frame->get_index ());
3660 while (index != frame->get_index ())
3661 {
3662 frame = frame->get_calling_frame ();
3663 gcc_assert (frame);
3664 }
3665 return frame;
757bf1df
DM
3666}
3667
808f4dfe
DM
3668/* Unbind svalues for any regions in REG and below.
3669 Find any pointers to such regions; convert them to
9a2c9579
DM
3670 poisoned values of kind PKIND.
3671 Also purge any dynamic extents. */
757bf1df 3672
808f4dfe
DM
3673void
3674region_model::unbind_region_and_descendents (const region *reg,
3675 enum poison_kind pkind)
757bf1df 3676{
808f4dfe
DM
3677 /* Gather a set of base regions to be unbound. */
3678 hash_set<const region *> base_regs;
3679 for (store::cluster_map_t::iterator iter = m_store.begin ();
3680 iter != m_store.end (); ++iter)
757bf1df 3681 {
808f4dfe
DM
3682 const region *iter_base_reg = (*iter).first;
3683 if (iter_base_reg->descendent_of_p (reg))
3684 base_regs.add (iter_base_reg);
757bf1df 3685 }
808f4dfe
DM
3686 for (hash_set<const region *>::iterator iter = base_regs.begin ();
3687 iter != base_regs.end (); ++iter)
3688 m_store.purge_cluster (*iter);
757bf1df 3689
808f4dfe
DM
3690 /* Find any pointers to REG or its descendents; convert to poisoned. */
3691 poison_any_pointers_to_descendents (reg, pkind);
9a2c9579
DM
3692
3693 /* Purge dynamic extents of any base regions in REG and below
3694 (e.g. VLAs and alloca stack regions). */
3695 for (auto iter : m_dynamic_extents)
3696 {
3697 const region *iter_reg = iter.first;
3698 if (iter_reg->descendent_of_p (reg))
3699 unset_dynamic_extents (iter_reg);
3700 }
757bf1df
DM
3701}
3702
808f4dfe
DM
3703/* Implementation of BindingVisitor.
3704 Update the bound svalues for regions below REG to use poisoned
3705 values instead. */
757bf1df 3706
808f4dfe 3707struct bad_pointer_finder
757bf1df 3708{
808f4dfe
DM
3709 bad_pointer_finder (const region *reg, enum poison_kind pkind,
3710 region_model_manager *mgr)
3711 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
3712 {}
757bf1df 3713
808f4dfe
DM
3714 void on_binding (const binding_key *, const svalue *&sval)
3715 {
3716 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3717 {
3718 const region *ptr_dst = ptr_sval->get_pointee ();
3719 /* Poison ptrs to descendents of REG, but not to REG itself,
3720 otherwise double-free detection doesn't work (since sm-state
3721 for "free" is stored on the original ptr svalue). */
3722 if (ptr_dst->descendent_of_p (m_reg)
3723 && ptr_dst != m_reg)
3724 {
3725 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
3726 sval->get_type ());
3727 ++m_count;
3728 }
3729 }
3730 }
757bf1df 3731
808f4dfe
DM
3732 const region *m_reg;
3733 enum poison_kind m_pkind;
3734 region_model_manager *const m_mgr;
3735 int m_count;
3736};
757bf1df 3737
808f4dfe
DM
3738/* Find any pointers to REG or its descendents; convert them to
3739 poisoned values of kind PKIND.
3740 Return the number of pointers that were poisoned. */
757bf1df 3741
808f4dfe
DM
3742int
3743region_model::poison_any_pointers_to_descendents (const region *reg,
3744 enum poison_kind pkind)
3745{
3746 bad_pointer_finder bv (reg, pkind, m_mgr);
3747 m_store.for_each_binding (bv);
3748 return bv.m_count;
757bf1df
DM
3749}
3750
808f4dfe
DM
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
				const program_point &point,
				region_model *out_model,
				const extrinsic_state *ext_state,
				const program_state *state_a,
				const program_state *state_b) const
{
  gcc_assert (out_model);
  /* All three models must share one manager so svalues/regions are
     interned consistently.  */
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models at different stack depths/frames can't be merged.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
		  ext_state, state_a, state_b);

  /* Merge the stores first; bail out if their bindings conflict.  */
  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  /* Merge the recorded dynamic sizes of heap/alloca regions.  */
  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
					   &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			     *other_model.m_constraints,
			     out_model->m_constraints);

  return true;
}
3790
/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
				   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  /* Evaluate the callee expression; we can only resolve it when it
     points at a specific function_region.  */
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
	= fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
	{
	  tree fn_decl = fn_reg->get_fndecl ();
	  /* Resolve aliases via the callgraph; give up if the decl has
	     no cgraph node.  */
	  cgraph_node *node = cgraph_node::get (fn_decl);
	  if (!node)
	    return NULL_TREE;
	  const cgraph_node *ultimate_node = node->ultimate_alias_target ();
	  if (ultimate_node)
	    return ultimate_node->decl;
	}
    }

  return NULL_TREE;
}
3820
/* Would be much simpler to use a lambda here, if it were supported.  */

/* Closure data for region_model::append_ssa_names_cb: the model being
   queried and the output vector to append matching decl_regions to.  */

struct append_ssa_names_cb_data
{
  const region_model *model;
  auto_vec<const decl_region *> *out;
};
757bf1df 3828
808f4dfe
DM
3829/* Populate *OUT with all decl_regions for SSA names in the current
3830 frame that have clusters within the store. */
757bf1df
DM
3831
3832void
808f4dfe
DM
3833region_model::
3834get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
3835 const
757bf1df 3836{
808f4dfe
DM
3837 append_ssa_names_cb_data data;
3838 data.model = this;
3839 data.out = out;
3840 m_store.for_each_cluster (append_ssa_names_cb, &data);
757bf1df
DM
3841}
3842
808f4dfe 3843/* Implementation detail of get_ssa_name_regions_for_current_frame. */
757bf1df 3844
808f4dfe
DM
3845void
3846region_model::append_ssa_names_cb (const region *base_reg,
3847 append_ssa_names_cb_data *cb_data)
757bf1df 3848{
808f4dfe
DM
3849 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
3850 return;
3851 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
757bf1df 3852 {
808f4dfe
DM
3853 if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
3854 cb_data->out->safe_push (decl_reg);
757bf1df
DM
3855 }
3856}
3857
b9365b93
DM
/* Return a new region describing a heap-allocated block of memory.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
					    region_model_context *ctxt)
{
  const region *reg = m_mgr->create_region_for_heap_alloc ();
  /* Only record the size when it has a sane (size_t-compatible) type;
     otherwise leave the extent unknown.  */
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
3870
/* Return a new region describing a block of memory allocated within the
   current frame.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_alloca (const svalue *size_in_bytes,
					region_model_context *ctxt)
{
  /* Alloca regions are owned by the current stack frame.  */
  const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
  /* As for heap allocations, only record sizes with a compatible type.  */
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
3884
b9365b93
DM
/* Record that the size of REG is SIZE_IN_BYTES.
   Use CTXT to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
				   const svalue *size_in_bytes,
				   region_model_context *ctxt)
{
  /* Callers are expected to have pre-checked the type (see the
     compat_types_p guards in the create_region_for_* functions).  */
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  /* Warn (via CTXT) if an attacker-controlled value reaches an
     allocation size.  */
  if (ctxt)
    check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
				  ctxt);
  m_dynamic_extents.put (reg, size_in_bytes);
}
3899
3900/* Get the recording of REG in bytes, or NULL if no dynamic size was
3901 recorded. */
3902
3903const svalue *
3904region_model::get_dynamic_extents (const region *reg) const
757bf1df 3905{
9a2c9579
DM
3906 if (const svalue * const *slot = m_dynamic_extents.get (reg))
3907 return *slot;
3908 return NULL;
3909}
3910
/* Unset any recorded dynamic size of REG.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  /* Removing a non-present key is harmless.  */
  m_dynamic_extents.remove (reg);
}
3918
eafa9d96
DM
/* class noop_region_model_context : public region_model_context.  */

/* A no-op context can't bifurcate analysis, so simply discard INFO
   (this context takes ownership of it).  */

void
noop_region_model_context::bifurcate (custom_edge_info *info)
{
  delete info;
}
3926
/* No-op: this context deliberately ignores path termination.  */

void
noop_region_model_context::terminate_path ()
{
}
3931
808f4dfe 3932/* struct model_merger. */
757bf1df 3933
808f4dfe 3934/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
3935
3936void
808f4dfe 3937model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 3938{
808f4dfe
DM
3939 pp_string (pp, "model A:");
3940 pp_newline (pp);
3941 m_model_a->dump_to_pp (pp, simple, true);
3942 pp_newline (pp);
757bf1df 3943
808f4dfe 3944 pp_string (pp, "model B:");
757bf1df 3945 pp_newline (pp);
808f4dfe 3946 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
3947 pp_newline (pp);
3948
808f4dfe 3949 pp_string (pp, "merged model:");
757bf1df 3950 pp_newline (pp);
808f4dfe 3951 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
3952 pp_newline (pp);
3953}
3954
/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  /* Build a pretty_printer that streams directly to FP, using the
     diagnostic machinery's tree printer and color settings.  */
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}
3967
/* Dump a multiline representation of this merger to stderr.
   For use from the debugger.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
3975
f573d351
DM
/* Return true if it's OK to merge SVAL with other svalues.  */

bool
model_merger::mergeable_svalue_p (const svalue *sval) const
{
  /* Without an extrinsic_state we have no sm-state to consult, so
     everything is mergeable.  */
  if (m_ext_state)
    {
      /* Reject merging svalues that have non-purgable sm-state,
	 to avoid falsely reporting memory leaks by merging them
	 with something else.  For example, given a local var "p",
	 reject the merger of a:
	   store_a mapping "p" to a malloc-ed ptr
	 with:
	   store_b mapping "p" to a NULL ptr.  */
      if (m_state_a)
	if (!m_state_a->can_purge_p (*m_ext_state, sval))
	  return false;
      if (m_state_b)
	if (!m_state_b->can_purge_p (*m_ext_state, sval))
	  return false;
    }
  return true;
}
3999
75038aa6
DM
4000} // namespace ana
4001
/* Dump RMODEL fully to stderr (i.e. without summarization).
   Intended for use from the debugger; lives outside namespace ana
   so the short name "debug" is available.  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}
4009
/* class rejected_op_constraint : public rejected_constraint.  */

/* Print "LHS OP RHS" to PP, evaluating both operands within a copy
   of the model captured when the constraint was rejected.  */

void
rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
{
  region_model m (m_model);
  const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
  const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
  lhs_sval->dump_to_pp (pp, true);
  pp_printf (pp, " %s ", op_symbol_code (m_op));
  rhs_sval->dump_to_pp (pp, true);
}
4022
8ca7fa84
DM
/* class rejected_ranges_constraint : public rejected_constraint.  */

/* Print "EXPR in RANGES" to PP, evaluating EXPR within a copy of the
   model captured when the constraint was rejected.  */

void
rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
{
  region_model m (m_model);
  const svalue *sval = m.get_rvalue (m_expr, NULL);
  sval->dump_to_pp (pp, true);
  pp_string (pp, " in ");
  m_ranges->dump_to_pp (pp, true);
}
4034
/* class engine.  */

/* engine's ctor.  Forward LOGGER to the region_model_manager.  */

engine::engine (logger *logger)
: m_mgr (logger)
{
}
4043
/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  /* "true" requests the detailed (per-class) breakdown.  */
  m_mgr.log_stats (logger, true);
}
4051
75038aa6
DM
4052namespace ana {
4053
757bf1df
DM
4054#if CHECKING_P
4055
4056namespace selftest {
4057
8c08c983
DM
/* Build a constant tree of the given type from STR.
   STR is parsed by real_from_string, so values such as "QNaN",
   "Inf" and "-0.0" are accepted.  */

static tree
build_real_cst_from_string (tree type, const char *str)
{
  REAL_VALUE_TYPE real;
  real_from_string (&real, str);
  return build_real (type, real);
}
4067
/* Append various "interesting" constants to OUT (e.g. NaN).
   Covers signed/unsigned integers plus the awkward floating-point
   values (NaNs, signed zeros, infinities) that break naive comparisons.  */

static void
append_interesting_constants (auto_vec<tree> *out)
{
  out->safe_push (build_int_cst (integer_type_node, 0));
  out->safe_push (build_int_cst (integer_type_node, 42));
  out->safe_push (build_int_cst (unsigned_type_node, 0));
  out->safe_push (build_int_cst (unsigned_type_node, 42));
  out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
}
4086
/* Verify that tree_cmp is a well-behaved comparator for qsort, even
   if the underlying constants aren't comparable.  */

static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple.  A bad comparator (e.g. one that is not
     transitive for NaNs) would trip qsort's consistency checks.  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
	{
	  auto_vec<tree> v (3);
	  v.quick_push (csts[i]);
	  v.quick_push (csts[j]);
	  v.quick_push (csts[k]);
	  v.qsort (tree_cmp);
	}
}
4109
757bf1df
DM
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Overload taking svalue operands directly.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  const svalue *lhs, tree_code op, const svalue *rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}
4121
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Overload taking tree operands, evaluated without a context.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  tree lhs, tree_code op, tree rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}
4133
90f7c300
DM
/* Implementation detail of ASSERT_DUMP_TREE_EQ.
   Render T through dump_tree and compare against EXPECTED.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* Normalize quote characters so expected strings are portable.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
4152
757bf1df
DM
/* Implementation detail of ASSERT_DUMP_EQ.
   Render MODEL via dump_to_pp and compare against EXPECTED.  */

static void
assert_dump_eq (const location &loc,
		const region_model &model,
		bool summarize,
		const char *expected)
{
  /* Normalize quote characters so expected strings are portable.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT
4175
/* Smoketest for region_model::dump_to_pp.
   An empty model should dump identically in full and summarized form.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
}
4197
884d9141
DM
/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  /* Chain the FIELD_DECLs together, preserving the order in FIELDS
     (chainon prepends, hence the nreverse afterwards).  */
  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  /* Compute size/alignment/field offsets for the new type.  */
  layout_type (t);
  return t;
}
4226
a96f1c38
DM
/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  /* FIELD_DECLs for "x" and "y", and the laid-out RECORD_TYPE.  */
  tree m_x_field;
  tree m_y_field;
  tree m_coord_type;
};
4247
/* Verify usage of a struct: setting fields, and the offsets reported
   by region::get_offset for each field's region.  */

static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x": first field, so offset 0.  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y": second field, one int in.  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}
4285
/* Verify usage of an array element: writing a value to a[0] should
   not crash or complain.  */

static void
test_array_1 ()
{
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
		     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);
}
4304
90f7c300
DM
/* Verify that region_model::get_representative_tree works as expected,
   for string constants, string literals, array elements, and struct
   fields.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST: the representative of its value should be the cst
     itself.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal: expect a pointer to the first element.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array: a placeholder value bound to
     a[IDX] should be reported back as "a[IDX]".  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct: expect "c.x" / "c.y".  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
4397
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  /* None of the above should have emitted diagnostics.  */
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
4429
808f4dfe
DM
/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
4455
/* Verify that initial_svalue are handled as expected: distinct globals
   get distinct initial values, interned per region.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  /* The rvalue of an untouched global is the interned initial value
     of its region.  */
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

}
757bf1df 4474
/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}
4512
/* Verify that binops on constant svalues are folded.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  /* Build constant svalues for 0..9 and sanity-check their identity.  */
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  /* Constant-folding of +, -, *, ==, != for every in-range pair.  */
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
	if (i != j)
	  ASSERT_NE (cst_sval[i], cst_sval[j]);
	if (i + j < NUM_CSTS)
	  {
	    const svalue *sum
	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (sum, cst_sval[i + j]);
	  }
	if (i - j >= 0)
	  {
	    const svalue *difference
	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (difference, cst_sval[i - j]);
	  }
	if (i * j < NUM_CSTS)
	  {
	    const svalue *product
	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (product, cst_sval[i * j]);
	  }
	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
						    cst_sval[i], cst_sval[j]);
	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
						     cst_sval[i], cst_sval[j]);
	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
	// etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding: x + 0 and 0 + x fold to x.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding: x * 0 -> 0; x * 1 -> x (and commuted forms).  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (boolean_type_node, "v");
    /* OR: true dominates; false is the identity.  */
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_true);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, &sval_placeholder),
		   &sval_placeholder);
      }
    /* AND: false dominates; true is the identity.  */
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_false);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, &sval_placeholder),
		   &sval_placeholder);
      }
  }
}
4661
/* Verify that sub_svalues are folded as expected.  */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply
     yields an unknown.  */

  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
						    unknown, c_x_reg);
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  /* The unknown result should carry the field's type.  */
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}
4686
/* Test that region::descendent_of_p works as expected.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  /* A global var's region is a descendent of the globals region.  */
  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}
4712
4713/* Verify that simple assignments work as expected. */
4714
4715static void
4716test_assignment ()
4717{
4718 tree int_0 = build_int_cst (integer_type_node, 0);
4719 tree x = build_global_decl ("x", integer_type_node);
4720 tree y = build_global_decl ("y", integer_type_node);
4721
4722 /* "x == 0", then use of y, then "y = 0;". */
808f4dfe
DM
4723 region_model_manager mgr;
4724 region_model model (&mgr);
757bf1df
DM
4725 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
4726 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
4727 model.set_value (model.get_lvalue (y, NULL),
4728 model.get_rvalue (int_0, NULL),
4729 NULL);
4730 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
4731 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
757bf1df
DM
4732}
4733
a96f1c38
DM
4734/* Verify that compound assignments work as expected. */
4735
4736static void
4737test_compound_assignment ()
4738{
4739 coord_test ct;
4740
4741 tree c = build_global_decl ("c", ct.m_coord_type);
4742 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4743 c, ct.m_x_field, NULL_TREE);
4744 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4745 c, ct.m_y_field, NULL_TREE);
4746 tree d = build_global_decl ("d", ct.m_coord_type);
4747 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4748 d, ct.m_x_field, NULL_TREE);
4749 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4750 d, ct.m_y_field, NULL_TREE);
4751
4752 tree int_17 = build_int_cst (integer_type_node, 17);
4753 tree int_m3 = build_int_cst (integer_type_node, -3);
4754
808f4dfe
DM
4755 region_model_manager mgr;
4756 region_model model (&mgr);
a96f1c38
DM
4757 model.set_value (c_x, int_17, NULL);
4758 model.set_value (c_y, int_m3, NULL);
4759
a96f1c38
DM
4760 /* Copy c to d. */
4761 model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL),
4762 NULL);
4763 /* Check that the fields have the same svalues. */
4764 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
4765 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
4766}
4767
757bf1df
DM
4768/* Verify the details of pushing and popping stack frames. */
4769
4770static void
4771test_stack_frames ()
4772{
4773 tree int_42 = build_int_cst (integer_type_node, 42);
4774 tree int_10 = build_int_cst (integer_type_node, 10);
4775 tree int_5 = build_int_cst (integer_type_node, 5);
4776 tree int_0 = build_int_cst (integer_type_node, 0);
4777
4778 auto_vec <tree> param_types;
4779 tree parent_fndecl = make_fndecl (integer_type_node,
4780 "parent_fn",
4781 param_types);
4782 allocate_struct_function (parent_fndecl, true);
4783
4784 tree child_fndecl = make_fndecl (integer_type_node,
4785 "child_fn",
4786 param_types);
4787 allocate_struct_function (child_fndecl, true);
4788
4789 /* "a" and "b" in the parent frame. */
4790 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4791 get_identifier ("a"),
4792 integer_type_node);
4793 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4794 get_identifier ("b"),
4795 integer_type_node);
4796 /* "x" and "y" in a child frame. */
4797 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4798 get_identifier ("x"),
4799 integer_type_node);
4800 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4801 get_identifier ("y"),
4802 integer_type_node);
4803
4804 /* "p" global. */
4805 tree p = build_global_decl ("p", ptr_type_node);
4806
4807 /* "q" global. */
4808 tree q = build_global_decl ("q", ptr_type_node);
4809
808f4dfe 4810 region_model_manager mgr;
757bf1df 4811 test_region_model_context ctxt;
808f4dfe 4812 region_model model (&mgr);
757bf1df
DM
4813
4814 /* Push stack frame for "parent_fn". */
808f4dfe
DM
4815 const region *parent_frame_reg
4816 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
4817 NULL, &ctxt);
4818 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
4819 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
4820 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
4821 model.set_value (a_in_parent_reg,
4822 model.get_rvalue (int_42, &ctxt),
4823 &ctxt);
4824 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
4825
757bf1df
DM
4826 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
4827 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4828 tristate (tristate::TS_TRUE));
4829
4830 /* Push stack frame for "child_fn". */
808f4dfe 4831 const region *child_frame_reg
757bf1df 4832 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
4833 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
4834 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
4835 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
4836 model.set_value (x_in_child_reg,
4837 model.get_rvalue (int_0, &ctxt),
4838 &ctxt);
4839 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
4840
757bf1df
DM
4841 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
4842 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
4843 tristate (tristate::TS_TRUE));
4844
4845 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
4846 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
4847 model.set_value (p_in_globals_reg,
4848 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 4849 &ctxt);
808f4dfe 4850 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
4851
4852 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
4853 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
4854 model.set_value (q_in_globals_reg,
4855 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
4856 &ctxt);
4857
808f4dfe
DM
4858 /* Test region::descendent_of_p. */
4859 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
4860 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
4861 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
4862
4863 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
4864 model.pop_frame (NULL, NULL, &ctxt);
4865 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
4866 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
4867
4868 /* Verify that p (which was pointing at the local "x" in the popped
4869 frame) has been poisoned. */
33255ad3 4870 const svalue *new_p_sval = model.get_rvalue (p, NULL);
757bf1df
DM
4871 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
4872 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
4873 POISON_KIND_POPPED_STACK);
4874
4875 /* Verify that q still points to p, in spite of the region
4876 renumbering. */
808f4dfe 4877 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 4878 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 4879 ASSERT_EQ (new_q_sval->maybe_get_region (),
757bf1df
DM
4880 model.get_lvalue (p, &ctxt));
4881
4882 /* Verify that top of stack has been updated. */
808f4dfe 4883 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
4884
4885 /* Verify locals in parent frame. */
4886 /* Verify "a" still has its value. */
808f4dfe 4887 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
4888 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
4889 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
4890 int_42);
4891 /* Verify "b" still has its constraint. */
4892 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
4893 tristate (tristate::TS_TRUE));
4894}
4895
4896/* Verify that get_representative_path_var works as expected, that
808f4dfe 4897 we can map from regions to parms and back within a recursive call
757bf1df
DM
4898 stack. */
4899
4900static void
4901test_get_representative_path_var ()
4902{
4903 auto_vec <tree> param_types;
4904 tree fndecl = make_fndecl (integer_type_node,
4905 "factorial",
4906 param_types);
4907 allocate_struct_function (fndecl, true);
4908
4909 /* Parm "n". */
4910 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4911 get_identifier ("n"),
4912 integer_type_node);
4913
808f4dfe
DM
4914 region_model_manager mgr;
4915 test_region_model_context ctxt;
4916 region_model model (&mgr);
757bf1df
DM
4917
4918 /* Push 5 stack frames for "factorial", each with a param */
808f4dfe
DM
4919 auto_vec<const region *> parm_regs;
4920 auto_vec<const svalue *> parm_svals;
757bf1df
DM
4921 for (int depth = 0; depth < 5; depth++)
4922 {
808f4dfe
DM
4923 const region *frame_n_reg
4924 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
4925 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
4926 parm_regs.safe_push (parm_n_reg);
757bf1df 4927
808f4dfe
DM
4928 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
4929 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
4930 parm_svals.safe_push (sval_n);
757bf1df
DM
4931 }
4932
4933 /* Verify that we can recognize that the regions are the parms,
4934 at every depth. */
4935 for (int depth = 0; depth < 5; depth++)
4936 {
808f4dfe
DM
4937 {
4938 svalue_set visited;
4939 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
4940 &visited),
4941 path_var (n, depth + 1));
4942 }
757bf1df
DM
4943 /* ...and that we can lookup lvalues for locals for all frames,
4944 not just the top. */
4945 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
808f4dfe 4946 parm_regs[depth]);
757bf1df 4947 /* ...and that we can locate the svalues. */
808f4dfe
DM
4948 {
4949 svalue_set visited;
4950 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
4951 &visited),
4952 path_var (n, depth + 1));
4953 }
757bf1df
DM
4954 }
4955}
4956
808f4dfe 4957/* Ensure that region_model::operator== works as expected. */
757bf1df
DM
4958
4959static void
808f4dfe 4960test_equality_1 ()
757bf1df 4961{
808f4dfe
DM
4962 tree int_42 = build_int_cst (integer_type_node, 42);
4963 tree int_17 = build_int_cst (integer_type_node, 17);
757bf1df 4964
808f4dfe
DM
4965/* Verify that "empty" region_model instances are equal to each other. */
4966 region_model_manager mgr;
4967 region_model model0 (&mgr);
4968 region_model model1 (&mgr);
757bf1df 4969 ASSERT_EQ (model0, model1);
808f4dfe
DM
4970
4971 /* Verify that setting state in model1 makes the models non-equal. */
4972 tree x = build_global_decl ("x", integer_type_node);
4973 model0.set_value (x, int_42, NULL);
4974 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4975 ASSERT_NE (model0, model1);
4976
4977 /* Verify the copy-ctor. */
4978 region_model model2 (model0);
4979 ASSERT_EQ (model0, model2);
4980 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
4981 ASSERT_NE (model1, model2);
4982
4983 /* Verify that models obtained from copy-ctor are independently editable
4984 w/o affecting the original model. */
4985 model2.set_value (x, int_17, NULL);
4986 ASSERT_NE (model0, model2);
4987 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
4988 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
757bf1df
DM
4989}
4990
4991/* Verify that region models for
4992 x = 42; y = 113;
4993 and
4994 y = 113; x = 42;
808f4dfe 4995 are equal. */
757bf1df
DM
4996
4997static void
4998test_canonicalization_2 ()
4999{
5000 tree int_42 = build_int_cst (integer_type_node, 42);
5001 tree int_113 = build_int_cst (integer_type_node, 113);
5002 tree x = build_global_decl ("x", integer_type_node);
5003 tree y = build_global_decl ("y", integer_type_node);
5004
808f4dfe
DM
5005 region_model_manager mgr;
5006 region_model model0 (&mgr);
757bf1df
DM
5007 model0.set_value (model0.get_lvalue (x, NULL),
5008 model0.get_rvalue (int_42, NULL),
5009 NULL);
5010 model0.set_value (model0.get_lvalue (y, NULL),
5011 model0.get_rvalue (int_113, NULL),
5012 NULL);
5013
808f4dfe 5014 region_model model1 (&mgr);
757bf1df
DM
5015 model1.set_value (model1.get_lvalue (y, NULL),
5016 model1.get_rvalue (int_113, NULL),
5017 NULL);
5018 model1.set_value (model1.get_lvalue (x, NULL),
5019 model1.get_rvalue (int_42, NULL),
5020 NULL);
5021
757bf1df
DM
5022 ASSERT_EQ (model0, model1);
5023}
5024
5025/* Verify that constraints for
5026 x > 3 && y > 42
5027 and
5028 y > 42 && x > 3
5029 are equal after canonicalization. */
5030
5031static void
5032test_canonicalization_3 ()
5033{
5034 tree int_3 = build_int_cst (integer_type_node, 3);
5035 tree int_42 = build_int_cst (integer_type_node, 42);
5036 tree x = build_global_decl ("x", integer_type_node);
5037 tree y = build_global_decl ("y", integer_type_node);
5038
808f4dfe
DM
5039 region_model_manager mgr;
5040 region_model model0 (&mgr);
757bf1df
DM
5041 model0.add_constraint (x, GT_EXPR, int_3, NULL);
5042 model0.add_constraint (y, GT_EXPR, int_42, NULL);
5043
808f4dfe 5044 region_model model1 (&mgr);
757bf1df
DM
5045 model1.add_constraint (y, GT_EXPR, int_42, NULL);
5046 model1.add_constraint (x, GT_EXPR, int_3, NULL);
5047
808f4dfe
DM
5048 model0.canonicalize ();
5049 model1.canonicalize ();
757bf1df
DM
5050 ASSERT_EQ (model0, model1);
5051}
5052
8c08c983
DM
5053/* Verify that we can canonicalize a model containing NaN and other real
5054 constants. */
5055
5056static void
5057test_canonicalization_4 ()
5058{
5059 auto_vec<tree> csts;
5060 append_interesting_constants (&csts);
5061
808f4dfe
DM
5062 region_model_manager mgr;
5063 region_model model (&mgr);
8c08c983 5064
3f207ab3 5065 for (tree cst : csts)
8c08c983
DM
5066 model.get_rvalue (cst, NULL);
5067
808f4dfe 5068 model.canonicalize ();
8c08c983
DM
5069}
5070
757bf1df
DM
5071/* Assert that if we have two region_model instances
5072 with values VAL_A and VAL_B for EXPR that they are
5073 mergable. Write the merged model to *OUT_MERGED_MODEL,
5074 and the merged svalue ptr to *OUT_MERGED_SVALUE.
5075 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
5076 for that region_model. */
5077
5078static void
5079assert_region_models_merge (tree expr, tree val_a, tree val_b,
808f4dfe
DM
5080 region_model *out_merged_model,
5081 const svalue **out_merged_svalue)
757bf1df 5082{
808f4dfe 5083 program_point point (program_point::origin ());
757bf1df 5084 test_region_model_context ctxt;
808f4dfe
DM
5085 region_model_manager *mgr = out_merged_model->get_manager ();
5086 region_model model0 (mgr);
5087 region_model model1 (mgr);
757bf1df
DM
5088 if (val_a)
5089 model0.set_value (model0.get_lvalue (expr, &ctxt),
5090 model0.get_rvalue (val_a, &ctxt),
5091 &ctxt);
5092 if (val_b)
5093 model1.set_value (model1.get_lvalue (expr, &ctxt),
5094 model1.get_rvalue (val_b, &ctxt),
5095 &ctxt);
5096
5097 /* They should be mergeable. */
808f4dfe
DM
5098 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
5099 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
757bf1df
DM
5100}
5101
5102/* Verify that we can merge region_model instances. */
5103
5104static void
5105test_state_merging ()
5106{
5107 tree int_42 = build_int_cst (integer_type_node, 42);
5108 tree int_113 = build_int_cst (integer_type_node, 113);
5109 tree x = build_global_decl ("x", integer_type_node);
5110 tree y = build_global_decl ("y", integer_type_node);
5111 tree z = build_global_decl ("z", integer_type_node);
5112 tree p = build_global_decl ("p", ptr_type_node);
5113
5114 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
5115 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
5116
5117 auto_vec <tree> param_types;
5118 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
5119 allocate_struct_function (test_fndecl, true);
5120
5121 /* Param "a". */
5122 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5123 get_identifier ("a"),
5124 integer_type_node);
5125 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
5126
455f58ec
DM
5127 /* Param "q", a pointer. */
5128 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5129 get_identifier ("q"),
5130 ptr_type_node);
5131
808f4dfe
DM
5132 program_point point (program_point::origin ());
5133 region_model_manager mgr;
5134
757bf1df 5135 {
808f4dfe
DM
5136 region_model model0 (&mgr);
5137 region_model model1 (&mgr);
5138 region_model merged (&mgr);
757bf1df 5139 /* Verify empty models can be merged. */
808f4dfe 5140 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5141 ASSERT_EQ (model0, merged);
5142 }
5143
5144 /* Verify that we can merge two contradictory constraints on the
5145 value for a global. */
5146 /* TODO: verify that the merged model doesn't have a value for
5147 the global */
5148 {
808f4dfe
DM
5149 region_model model0 (&mgr);
5150 region_model model1 (&mgr);
5151 region_model merged (&mgr);
757bf1df
DM
5152 test_region_model_context ctxt;
5153 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5154 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe 5155 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5156 ASSERT_NE (model0, merged);
5157 ASSERT_NE (model1, merged);
5158 }
5159
5160 /* Verify handling of a PARM_DECL. */
5161 {
5162 test_region_model_context ctxt;
808f4dfe
DM
5163 region_model model0 (&mgr);
5164 region_model model1 (&mgr);
757bf1df
DM
5165 ASSERT_EQ (model0.get_stack_depth (), 0);
5166 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5167 ASSERT_EQ (model0.get_stack_depth (), 1);
757bf1df
DM
5168 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5169
808f4dfe
DM
5170 placeholder_svalue test_sval (integer_type_node, "test sval");
5171 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
5172 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
757bf1df
DM
5173 ASSERT_EQ (model0, model1);
5174
757bf1df 5175 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5176 region_model merged (&mgr);
5177 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5178 ASSERT_EQ (model0, merged);
808f4dfe
DM
5179 /* In particular, "a" should have the placeholder value. */
5180 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
757bf1df
DM
5181 }
5182
5183 /* Verify handling of a global. */
5184 {
5185 test_region_model_context ctxt;
808f4dfe
DM
5186 region_model model0 (&mgr);
5187 region_model model1 (&mgr);
757bf1df 5188
808f4dfe
DM
5189 placeholder_svalue test_sval (integer_type_node, "test sval");
5190 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5191 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5192 ASSERT_EQ (model0, model1);
757bf1df
DM
5193
5194 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5195 region_model merged (&mgr);
5196 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5197 ASSERT_EQ (model0, merged);
808f4dfe
DM
5198 /* In particular, "x" should have the placeholder value. */
5199 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
757bf1df
DM
5200 }
5201
5202 /* Use global-handling to verify various combinations of values. */
5203
5204 /* Two equal constant values. */
5205 {
808f4dfe
DM
5206 region_model merged (&mgr);
5207 const svalue *merged_x_sval;
757bf1df
DM
5208 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
5209
5210 /* In particular, there should be a constant value for "x". */
5211 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
5212 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
5213 int_42);
5214 }
5215
5216 /* Two non-equal constant values. */
5217 {
808f4dfe
DM
5218 region_model merged (&mgr);
5219 const svalue *merged_x_sval;
757bf1df
DM
5220 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
5221
808f4dfe
DM
5222 /* In particular, there should be a "widening" value for "x". */
5223 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
757bf1df
DM
5224 }
5225
808f4dfe 5226 /* Initial and constant. */
757bf1df 5227 {
808f4dfe
DM
5228 region_model merged (&mgr);
5229 const svalue *merged_x_sval;
757bf1df
DM
5230 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
5231
5232 /* In particular, there should be an unknown value for "x". */
5233 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5234 }
5235
808f4dfe 5236 /* Constant and initial. */
757bf1df 5237 {
808f4dfe
DM
5238 region_model merged (&mgr);
5239 const svalue *merged_x_sval;
757bf1df
DM
5240 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
5241
5242 /* In particular, there should be an unknown value for "x". */
5243 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5244 }
5245
5246 /* Unknown and constant. */
5247 // TODO
5248
5249 /* Pointers: NULL and NULL. */
5250 // TODO
5251
5252 /* Pointers: NULL and non-NULL. */
5253 // TODO
5254
5255 /* Pointers: non-NULL and non-NULL: ptr to a local. */
5256 {
808f4dfe 5257 region_model model0 (&mgr);
757bf1df 5258 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
757bf1df
DM
5259 model0.set_value (model0.get_lvalue (p, NULL),
5260 model0.get_rvalue (addr_of_a, NULL), NULL);
5261
5262 region_model model1 (model0);
5263 ASSERT_EQ (model0, model1);
5264
5265 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5266 region_model merged (&mgr);
5267 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5268 ASSERT_EQ (model0, merged);
5269 }
5270
5271 /* Pointers: non-NULL and non-NULL: ptr to a global. */
5272 {
808f4dfe 5273 region_model merged (&mgr);
757bf1df 5274 /* p == &y in both input models. */
808f4dfe 5275 const svalue *merged_p_sval;
757bf1df
DM
5276 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
5277 &merged_p_sval);
5278
5279 /* We should get p == &y in the merged model. */
5280 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
808f4dfe
DM
5281 const region_svalue *merged_p_ptr
5282 = merged_p_sval->dyn_cast_region_svalue ();
5283 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
5284 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
757bf1df
DM
5285 }
5286
5287 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
5288 {
808f4dfe
DM
5289 region_model merged (&mgr);
5290 /* x == &y vs x == &z in the input models; these are actually casts
5291 of the ptrs to "int". */
5292 const svalue *merged_x_sval;
5293 // TODO:
757bf1df
DM
5294 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
5295 &merged_x_sval);
5296
5297 /* We should get x == unknown in the merged model. */
5298 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5299 }
5300
5301 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
5302 {
5303 test_region_model_context ctxt;
808f4dfe 5304 region_model model0 (&mgr);
9a2c9579 5305 tree size = build_int_cst (size_type_node, 1024);
808f4dfe 5306 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
b9365b93
DM
5307 const region *new_reg
5308 = model0.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe 5309 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
757bf1df 5310 model0.set_value (model0.get_lvalue (p, &ctxt),
808f4dfe 5311 ptr_sval, &ctxt);
757bf1df
DM
5312
5313 region_model model1 (model0);
5314
5315 ASSERT_EQ (model0, model1);
5316
808f4dfe
DM
5317 region_model merged (&mgr);
5318 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5319
808f4dfe 5320 /* The merged model ought to be identical. */
757bf1df
DM
5321 ASSERT_EQ (model0, merged);
5322 }
5323
808f4dfe
DM
5324 /* Two regions sharing the same placeholder svalue should continue sharing
5325 it after self-merger. */
757bf1df
DM
5326 {
5327 test_region_model_context ctxt;
808f4dfe
DM
5328 region_model model0 (&mgr);
5329 placeholder_svalue placeholder_sval (integer_type_node, "test");
5330 model0.set_value (model0.get_lvalue (x, &ctxt),
5331 &placeholder_sval, &ctxt);
5332 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
757bf1df
DM
5333 region_model model1 (model0);
5334
5335 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5336 region_model merged (&mgr);
5337 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5338 ASSERT_EQ (model0, merged);
5339
5340 /* In particular, we should have x == y. */
5341 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
5342 tristate (tristate::TS_TRUE));
5343 }
5344
757bf1df 5345 {
808f4dfe
DM
5346 region_model model0 (&mgr);
5347 region_model model1 (&mgr);
757bf1df
DM
5348 test_region_model_context ctxt;
5349 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5350 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
808f4dfe
DM
5351 region_model merged (&mgr);
5352 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5353 }
5354
5355 {
808f4dfe
DM
5356 region_model model0 (&mgr);
5357 region_model model1 (&mgr);
757bf1df
DM
5358 test_region_model_context ctxt;
5359 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5360 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5361 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe
DM
5362 region_model merged (&mgr);
5363 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5364 }
757bf1df
DM
5365
5366 // TODO: what can't we merge? need at least one such test
5367
5368 /* TODO: various things
5369 - heap regions
5370 - value merging:
5371 - every combination, but in particular
808f4dfe 5372 - pairs of regions
757bf1df
DM
5373 */
5374
5375 /* Views. */
5376 {
5377 test_region_model_context ctxt;
808f4dfe 5378 region_model model0 (&mgr);
757bf1df 5379
808f4dfe
DM
5380 const region *x_reg = model0.get_lvalue (x, &ctxt);
5381 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
757bf1df
DM
5382 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
5383
5384 region_model model1 (model0);
5385 ASSERT_EQ (model1, model0);
5386
5387 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5388 region_model merged (&mgr);
5389 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5390 }
455f58ec
DM
5391
5392 /* Verify that we can merge a model in which a local in an older stack
5393 frame points to a local in a more recent stack frame. */
5394 {
808f4dfe 5395 region_model model0 (&mgr);
455f58ec 5396 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
808f4dfe 5397 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
455f58ec
DM
5398
5399 /* Push a second frame. */
808f4dfe 5400 const region *reg_2nd_frame
455f58ec
DM
5401 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5402
5403 /* Have a pointer in the older frame point to a local in the
5404 more recent frame. */
808f4dfe
DM
5405 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
5406 model0.set_value (q_in_first_frame, sval_ptr, NULL);
455f58ec
DM
5407
5408 /* Verify that it's pointing at the newer frame. */
5932dd35 5409 const region *reg_pointee = sval_ptr->maybe_get_region ();
808f4dfe 5410 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
455f58ec 5411
808f4dfe 5412 model0.canonicalize ();
455f58ec
DM
5413
5414 region_model model1 (model0);
5415 ASSERT_EQ (model0, model1);
5416
5417 /* They should be mergeable, and the result should be the same
5418 (after canonicalization, at least). */
808f4dfe
DM
5419 region_model merged (&mgr);
5420 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5421 merged.canonicalize ();
455f58ec
DM
5422 ASSERT_EQ (model0, merged);
5423 }
5424
5425 /* Verify that we can merge a model in which a local points to a global. */
5426 {
808f4dfe 5427 region_model model0 (&mgr);
455f58ec
DM
5428 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5429 model0.set_value (model0.get_lvalue (q, NULL),
5430 model0.get_rvalue (addr_of_y, NULL), NULL);
5431
455f58ec
DM
5432 region_model model1 (model0);
5433 ASSERT_EQ (model0, model1);
5434
5435 /* They should be mergeable, and the result should be the same
5436 (after canonicalization, at least). */
808f4dfe
DM
5437 region_model merged (&mgr);
5438 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
455f58ec
DM
5439 ASSERT_EQ (model0, merged);
5440 }
757bf1df
DM
5441}
5442
5443/* Verify that constraints are correctly merged when merging region_model
5444 instances. */
5445
5446static void
5447test_constraint_merging ()
5448{
5449 tree int_0 = build_int_cst (integer_type_node, 0);
5450 tree int_5 = build_int_cst (integer_type_node, 5);
5451 tree x = build_global_decl ("x", integer_type_node);
5452 tree y = build_global_decl ("y", integer_type_node);
5453 tree z = build_global_decl ("z", integer_type_node);
5454 tree n = build_global_decl ("n", integer_type_node);
5455
808f4dfe 5456 region_model_manager mgr;
757bf1df
DM
5457 test_region_model_context ctxt;
5458
5459 /* model0: 0 <= (x == y) < n. */
808f4dfe 5460 region_model model0 (&mgr);
757bf1df
DM
5461 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
5462 model0.add_constraint (x, GE_EXPR, int_0, NULL);
5463 model0.add_constraint (x, LT_EXPR, n, NULL);
5464
5465 /* model1: z != 5 && (0 <= x < n). */
808f4dfe 5466 region_model model1 (&mgr);
757bf1df
DM
5467 model1.add_constraint (z, NE_EXPR, int_5, NULL);
5468 model1.add_constraint (x, GE_EXPR, int_0, NULL);
5469 model1.add_constraint (x, LT_EXPR, n, NULL);
5470
5471 /* They should be mergeable; the merged constraints should
5472 be: (0 <= x < n). */
808f4dfe
DM
5473 program_point point (program_point::origin ());
5474 region_model merged (&mgr);
5475 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5476
5477 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
5478 tristate (tristate::TS_TRUE));
5479 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
5480 tristate (tristate::TS_TRUE));
5481
5482 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
5483 tristate (tristate::TS_UNKNOWN));
5484 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
5485 tristate (tristate::TS_UNKNOWN));
5486}
5487
808f4dfe
DM
5488/* Verify that widening_svalue::eval_condition_without_cm works as
5489 expected. */
5490
5491static void
5492test_widening_constraints ()
5493{
5494 program_point point (program_point::origin ());
5495 tree int_0 = build_int_cst (integer_type_node, 0);
5496 tree int_m1 = build_int_cst (integer_type_node, -1);
5497 tree int_1 = build_int_cst (integer_type_node, 1);
5498 tree int_256 = build_int_cst (integer_type_node, 256);
5499 region_model_manager mgr;
5500 test_region_model_context ctxt;
5501 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
5502 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
5503 const svalue *w_zero_then_one_sval
5504 = mgr.get_or_create_widening_svalue (integer_type_node, point,
5505 int_0_sval, int_1_sval);
5506 const widening_svalue *w_zero_then_one
5507 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
5508 ASSERT_EQ (w_zero_then_one->get_direction (),
5509 widening_svalue::DIR_ASCENDING);
5510 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
5511 tristate::TS_FALSE);
5512 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
5513 tristate::TS_FALSE);
5514 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
5515 tristate::TS_UNKNOWN);
5516 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
5517 tristate::TS_UNKNOWN);
5518
5519 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
5520 tristate::TS_FALSE);
5521 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
5522 tristate::TS_UNKNOWN);
5523 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
5524 tristate::TS_UNKNOWN);
5525 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
5526 tristate::TS_UNKNOWN);
5527
5528 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
5529 tristate::TS_TRUE);
5530 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
5531 tristate::TS_UNKNOWN);
5532 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
5533 tristate::TS_UNKNOWN);
5534 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
5535 tristate::TS_UNKNOWN);
5536
5537 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
5538 tristate::TS_TRUE);
5539 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
5540 tristate::TS_TRUE);
5541 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
5542 tristate::TS_UNKNOWN);
5543 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
5544 tristate::TS_UNKNOWN);
5545
5546 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
5547 tristate::TS_FALSE);
5548 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
5549 tristate::TS_UNKNOWN);
5550 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
5551 tristate::TS_UNKNOWN);
5552 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
5553 tristate::TS_UNKNOWN);
5554
5555 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
5556 tristate::TS_TRUE);
5557 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
5558 tristate::TS_UNKNOWN);
5559 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
5560 tristate::TS_UNKNOWN);
5561 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
5562 tristate::TS_UNKNOWN);
5563}
5564
5565/* Verify merging constraints for states simulating successive iterations
5566 of a loop.
5567 Simulate:
5568 for (i = 0; i < 256; i++)
5569 [...body...]
5570 i.e. this gimple:.
5571 i_15 = 0;
5572 goto <bb 4>;
5573
5574 <bb 4> :
5575 i_11 = PHI <i_15(2), i_23(3)>
5576 if (i_11 <= 255)
5577 goto <bb 3>;
5578 else
5579 goto [AFTER LOOP]
5580
5581 <bb 3> :
5582 [LOOP BODY]
5583 i_23 = i_11 + 1;
5584
5585 and thus these ops (and resultant states):
5586 i_11 = PHI()
5587 {i_11: 0}
5588 add_constraint (i_11 <= 255) [for the true edge]
5589 {i_11: 0} [constraint was a no-op]
5590 i_23 = i_11 + 1;
5591 {i_22: 1}
5592 i_11 = PHI()
5593 {i_11: WIDENED (at phi, 0, 1)}
5594 add_constraint (i_11 <= 255) [for the true edge]
5595 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
5596 i_23 = i_11 + 1;
5597 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
5598 i_11 = PHI(); merge with state at phi above
5599 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
5600 [changing meaning of "WIDENED" here]
5601 if (i_11 <= 255)
5602 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
5603 F: {i_11: 256}
5604 */
5605
5606static void
5607test_iteration_1 ()
5608{
5609 program_point point (program_point::origin ());
5610
5611 tree int_0 = build_int_cst (integer_type_node, 0);
5612 tree int_1 = build_int_cst (integer_type_node, 1);
5613 tree int_256 = build_int_cst (integer_type_node, 256);
5614 tree int_257 = build_int_cst (integer_type_node, 257);
5615 tree i = build_global_decl ("i", integer_type_node);
5616
5617 region_model_manager mgr;
5618 test_region_model_context ctxt;
5619
5620 /* model0: i: 0. */
5621 region_model model0 (&mgr);
5622 model0.set_value (i, int_0, &ctxt);
5623
5624 /* model1: i: 1. */
5625 region_model model1 (&mgr);
5626 model1.set_value (i, int_1, &ctxt);
5627
5628 /* Should merge "i" to a widened value. */
5629 region_model model2 (&mgr);
5630 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
5631 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
5632 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
5633 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
5634 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
5635
5636 /* Add constraint: i < 256 */
5637 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
5638 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
5639 tristate (tristate::TS_TRUE));
5640 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
5641 tristate (tristate::TS_TRUE));
5642
5643 /* Try merging with the initial state. */
5644 region_model model3 (&mgr);
5645 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
5646 /* Merging the merged value with the initial value should be idempotent,
5647 so that the analysis converges. */
5648 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
5649 /* Merger of 0 and a widening value with constraint < CST
5650 should retain the constraint, even though it was implicit
5651 for the 0 case. */
5652 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
5653 tristate (tristate::TS_TRUE));
5654 /* ...and we should have equality: the analysis should have converged. */
5655 ASSERT_EQ (model3, model2);
5656
5657 /* "i_23 = i_11 + 1;" */
5658 region_model model4 (model3);
5659 ASSERT_EQ (model4, model2);
5660 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
5661 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
5662 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
5663
5664 /* Try merging with the "i: 1" state. */
5665 region_model model5 (&mgr);
5666 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
5667 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
5668 ASSERT_EQ (model5, model4);
5669
5670 /* "i_11 = PHI();" merge with state at phi above.
5671 For i, we should have a merger of WIDENING with WIDENING + 1,
5672 and this should be WIDENING again. */
5673 region_model model6 (&mgr);
5674 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
5675 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
5676 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
5677
5678 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
5679}
5680
6969ac30
DM
5681/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
5682 all cast pointers to that region are also known to be non-NULL. */
5683
5684static void
5685test_malloc_constraints ()
5686{
808f4dfe
DM
5687 region_model_manager mgr;
5688 region_model model (&mgr);
6969ac30
DM
5689 tree p = build_global_decl ("p", ptr_type_node);
5690 tree char_star = build_pointer_type (char_type_node);
5691 tree q = build_global_decl ("q", char_star);
5692 tree null_ptr = build_int_cst (ptr_type_node, 0);
5693
808f4dfe 5694 const svalue *size_in_bytes
9a2c9579 5695 = mgr.get_or_create_unknown_svalue (size_type_node);
b9365b93 5696 const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
808f4dfe
DM
5697 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
5698 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6969ac30
DM
5699 model.set_value (q, p, NULL);
5700
6969ac30
DM
5701 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
5702 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
5703 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
5704 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
5705
5706 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
5707
6969ac30
DM
5708 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
5709 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
5710 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
5711 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
5712}
5713
808f4dfe
DM
5714/* Smoketest of getting and setting the value of a variable. */
5715
5716static void
5717test_var ()
5718{
5719 /* "int i;" */
5720 tree i = build_global_decl ("i", integer_type_node);
5721
5722 tree int_17 = build_int_cst (integer_type_node, 17);
5723 tree int_m3 = build_int_cst (integer_type_node, -3);
5724
5725 region_model_manager mgr;
5726 region_model model (&mgr);
5727
5728 const region *i_reg = model.get_lvalue (i, NULL);
5729 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
5730
5731 /* Reading "i" should give a symbolic "initial value". */
5732 const svalue *sval_init = model.get_rvalue (i, NULL);
5733 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
5734 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
5735 /* ..and doing it again should give the same "initial value". */
5736 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
5737
5738 /* "i = 17;". */
5739 model.set_value (i, int_17, NULL);
5740 ASSERT_EQ (model.get_rvalue (i, NULL),
5741 model.get_rvalue (int_17, NULL));
5742
5743 /* "i = -3;". */
5744 model.set_value (i, int_m3, NULL);
5745 ASSERT_EQ (model.get_rvalue (i, NULL),
5746 model.get_rvalue (int_m3, NULL));
5747
5748 /* Verify get_offset for "i". */
5749 {
5750 region_offset offset = i_reg->get_offset ();
5751 ASSERT_EQ (offset.get_base_region (), i_reg);
5752 ASSERT_EQ (offset.get_bit_offset (), 0);
5753 }
5754}
5755
5756static void
5757test_array_2 ()
5758{
5759 /* "int arr[10];" */
5760 tree tlen = size_int (10);
5761 tree arr_type
5762 = build_array_type (integer_type_node, build_index_type (tlen));
5763 tree arr = build_global_decl ("arr", arr_type);
5764
5765 /* "int i;" */
5766 tree i = build_global_decl ("i", integer_type_node);
5767
5768 tree int_0 = build_int_cst (integer_type_node, 0);
5769 tree int_1 = build_int_cst (integer_type_node, 1);
5770
5771 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
5772 arr, int_0, NULL_TREE, NULL_TREE);
5773 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
5774 arr, int_1, NULL_TREE, NULL_TREE);
5775 tree arr_i = build4 (ARRAY_REF, integer_type_node,
5776 arr, i, NULL_TREE, NULL_TREE);
5777
5778 tree int_17 = build_int_cst (integer_type_node, 17);
5779 tree int_42 = build_int_cst (integer_type_node, 42);
5780 tree int_m3 = build_int_cst (integer_type_node, -3);
5781
5782 region_model_manager mgr;
5783 region_model model (&mgr);
5784 /* "arr[0] = 17;". */
5785 model.set_value (arr_0, int_17, NULL);
5786 /* "arr[1] = -3;". */
5787 model.set_value (arr_1, int_m3, NULL);
5788
5789 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
5790 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
5791
5792 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
5793 model.set_value (arr_1, int_42, NULL);
5794 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
5795
5796 /* Verify get_offset for "arr[0]". */
5797 {
5798 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
5799 region_offset offset = arr_0_reg->get_offset ();
5800 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5801 ASSERT_EQ (offset.get_bit_offset (), 0);
5802 }
5803
5804 /* Verify get_offset for "arr[1]". */
5805 {
5806 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
5807 region_offset offset = arr_1_reg->get_offset ();
5808 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5809 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
5810 }
5811
5812 /* "arr[i] = i;" - this should remove the earlier bindings. */
5813 model.set_value (arr_i, i, NULL);
5814 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
5815 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
5816
5817 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
5818 model.set_value (arr_0, int_17, NULL);
5819 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
5820 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
5821}
5822
5823/* Smoketest of dereferencing a pointer via MEM_REF. */
5824
5825static void
5826test_mem_ref ()
5827{
5828 /*
5829 x = 17;
5830 p = &x;
5831 *p;
5832 */
5833 tree x = build_global_decl ("x", integer_type_node);
5834 tree int_star = build_pointer_type (integer_type_node);
5835 tree p = build_global_decl ("p", int_star);
5836
5837 tree int_17 = build_int_cst (integer_type_node, 17);
5838 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
5839 tree offset_0 = build_int_cst (integer_type_node, 0);
5840 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
5841
5842 region_model_manager mgr;
5843 region_model model (&mgr);
5844
5845 /* "x = 17;". */
5846 model.set_value (x, int_17, NULL);
5847
5848 /* "p = &x;". */
5849 model.set_value (p, addr_of_x, NULL);
5850
5851 const svalue *sval = model.get_rvalue (star_p, NULL);
5852 ASSERT_EQ (sval->maybe_get_constant (), int_17);
5853}
5854
5855/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
5856 Analogous to this code:
5857 void test_6 (int a[10])
5858 {
5859 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5860 a[3] = 42;
5861 __analyzer_eval (a[3] == 42); [should be TRUE]
5862 }
5863 from data-model-1.c, which looks like this at the gimple level:
5864 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
5865 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
5866 int _2 = *_1; # MEM_REF
5867 _Bool _3 = _2 == 42;
5868 int _4 = (int) _3;
5869 __analyzer_eval (_4);
5870
5871 # a[3] = 42;
5872 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
5873 *_5 = 42; # MEM_REF
5874
5875 # __analyzer_eval (a[3] == 42); [should be TRUE]
5876 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
5877 int _7 = *_6; # MEM_REF
5878 _Bool _8 = _7 == 42;
5879 int _9 = (int) _8;
5880 __analyzer_eval (_9); */
5881
5882static void
5883test_POINTER_PLUS_EXPR_then_MEM_REF ()
5884{
5885 tree int_star = build_pointer_type (integer_type_node);
5886 tree a = build_global_decl ("a", int_star);
5887 tree offset_12 = build_int_cst (size_type_node, 12);
5888 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
5889 tree offset_0 = build_int_cst (integer_type_node, 0);
5890 tree mem_ref = build2 (MEM_REF, integer_type_node,
5891 pointer_plus_expr, offset_0);
5892 region_model_manager mgr;
5893 region_model m (&mgr);
5894
5895 tree int_42 = build_int_cst (integer_type_node, 42);
5896 m.set_value (mem_ref, int_42, NULL);
5897 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
5898}
5899
5900/* Verify that malloc works. */
5901
5902static void
5903test_malloc ()
5904{
5905 tree int_star = build_pointer_type (integer_type_node);
5906 tree p = build_global_decl ("p", int_star);
5907 tree n = build_global_decl ("n", integer_type_node);
5908 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5909 n, build_int_cst (size_type_node, 4));
5910
5911 region_model_manager mgr;
5912 test_region_model_context ctxt;
5913 region_model model (&mgr);
5914
5915 /* "p = malloc (n * 4);". */
5916 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 5917 const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe
DM
5918 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5919 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 5920 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
5921}
5922
5923/* Verify that alloca works. */
5924
5925static void
5926test_alloca ()
5927{
5928 auto_vec <tree> param_types;
5929 tree fndecl = make_fndecl (integer_type_node,
5930 "test_fn",
5931 param_types);
5932 allocate_struct_function (fndecl, true);
5933
5934
5935 tree int_star = build_pointer_type (integer_type_node);
5936 tree p = build_global_decl ("p", int_star);
5937 tree n = build_global_decl ("n", integer_type_node);
5938 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
5939 n, build_int_cst (size_type_node, 4));
5940
5941 region_model_manager mgr;
5942 test_region_model_context ctxt;
5943 region_model model (&mgr);
5944
5945 /* Push stack frame. */
5946 const region *frame_reg
5947 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
5948 NULL, &ctxt);
5949 /* "p = alloca (n * 4);". */
5950 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 5951 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
808f4dfe
DM
5952 ASSERT_EQ (reg->get_parent_region (), frame_reg);
5953 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
5954 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 5955 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
5956
5957 /* Verify that the pointers to the alloca region are replaced by
5958 poisoned values when the frame is popped. */
5959 model.pop_frame (NULL, NULL, &ctxt);
33255ad3 5960 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
808f4dfe
DM
5961}
5962
71fc4655
DM
5963/* Verify that svalue::involves_p works. */
5964
5965static void
5966test_involves_p ()
5967{
5968 region_model_manager mgr;
5969 tree int_star = build_pointer_type (integer_type_node);
5970 tree p = build_global_decl ("p", int_star);
5971 tree q = build_global_decl ("q", int_star);
5972
5973 test_region_model_context ctxt;
5974 region_model model (&mgr);
5975 const svalue *p_init = model.get_rvalue (p, &ctxt);
5976 const svalue *q_init = model.get_rvalue (q, &ctxt);
5977
5978 ASSERT_TRUE (p_init->involves_p (p_init));
5979 ASSERT_FALSE (p_init->involves_p (q_init));
5980
5981 const region *star_p_reg = mgr.get_symbolic_region (p_init);
5982 const region *star_q_reg = mgr.get_symbolic_region (q_init);
5983
5984 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
5985 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
5986
5987 ASSERT_TRUE (init_star_p->involves_p (p_init));
5988 ASSERT_FALSE (p_init->involves_p (init_star_p));
5989 ASSERT_FALSE (init_star_p->involves_p (q_init));
5990 ASSERT_TRUE (init_star_q->involves_p (q_init));
5991 ASSERT_FALSE (init_star_q->involves_p (p_init));
5992}
5993
757bf1df
DM
5994/* Run all of the selftests within this file. */
5995
5996void
5997analyzer_region_model_cc_tests ()
5998{
8c08c983 5999 test_tree_cmp_on_constants ();
757bf1df 6000 test_dump ();
808f4dfe
DM
6001 test_struct ();
6002 test_array_1 ();
90f7c300 6003 test_get_representative_tree ();
757bf1df 6004 test_unique_constants ();
808f4dfe
DM
6005 test_unique_unknowns ();
6006 test_initial_svalue_folding ();
6007 test_unaryop_svalue_folding ();
6008 test_binop_svalue_folding ();
6009 test_sub_svalue_folding ();
6010 test_descendent_of_p ();
757bf1df 6011 test_assignment ();
a96f1c38 6012 test_compound_assignment ();
757bf1df
DM
6013 test_stack_frames ();
6014 test_get_representative_path_var ();
808f4dfe 6015 test_equality_1 ();
757bf1df
DM
6016 test_canonicalization_2 ();
6017 test_canonicalization_3 ();
8c08c983 6018 test_canonicalization_4 ();
757bf1df
DM
6019 test_state_merging ();
6020 test_constraint_merging ();
808f4dfe
DM
6021 test_widening_constraints ();
6022 test_iteration_1 ();
6969ac30 6023 test_malloc_constraints ();
808f4dfe
DM
6024 test_var ();
6025 test_array_2 ();
6026 test_mem_ref ();
6027 test_POINTER_PLUS_EXPR_then_MEM_REF ();
6028 test_malloc ();
6029 test_alloca ();
71fc4655 6030 test_involves_p ();
757bf1df
DM
6031}
6032
6033} // namespace selftest
6034
6035#endif /* CHECKING_P */
6036
75038aa6
DM
6037} // namespace ana
6038
757bf1df 6039#endif /* #if ENABLE_ANALYZER */