]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
analyzer: check for writes to consts via access attr [PR104793]
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df 1/* Classes for modeling the state of memory.
7adcbafe 2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
757bf1df
DM
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
7892ff37 29#include "diagnostic-core.h"
757bf1df
DM
30#include "graphviz.h"
31#include "options.h"
32#include "cgraph.h"
33#include "tree-dfa.h"
34#include "stringpool.h"
35#include "convert.h"
36#include "target.h"
37#include "fold-const.h"
38#include "tree-pretty-print.h"
39#include "diagnostic-color.h"
40#include "diagnostic-metadata.h"
757bf1df 41#include "tristate.h"
ef7827b0 42#include "bitmap.h"
757bf1df
DM
43#include "selftest.h"
44#include "function.h"
809192e7 45#include "json.h"
757bf1df
DM
46#include "analyzer/analyzer.h"
47#include "analyzer/analyzer-logging.h"
48#include "ordered-hash-map.h"
49#include "options.h"
50#include "cgraph.h"
51#include "cfg.h"
52#include "digraph.h"
53#include "analyzer/supergraph.h"
54#include "sbitmap.h"
808f4dfe
DM
55#include "analyzer/call-string.h"
56#include "analyzer/program-point.h"
57#include "analyzer/store.h"
757bf1df
DM
58#include "analyzer/region-model.h"
59#include "analyzer/constraint-manager.h"
60#include "diagnostic-event-id.h"
61#include "analyzer/sm.h"
62#include "diagnostic-event-id.h"
63#include "analyzer/sm.h"
64#include "analyzer/pending-diagnostic.h"
808f4dfe 65#include "analyzer/region-model-reachability.h"
757bf1df 66#include "analyzer/analyzer-selftests.h"
f573d351 67#include "analyzer/program-state.h"
884d9141 68#include "stor-layout.h"
c7e276b8 69#include "attribs.h"
9a2c9579 70#include "tree-object-size.h"
1e2fe671
DM
71#include "gimple-ssa.h"
72#include "tree-phinodes.h"
73#include "tree-ssa-operands.h"
74#include "ssa-iterators.h"
5fbcbcaf 75#include "calls.h"
757bf1df
DM
76
77#if ENABLE_ANALYZER
78
75038aa6
DM
79namespace ana {
80
757bf1df
DM
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  /* TDF_SLIM keeps the dump concise; no indentation, no extra flags.  */
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
89
808f4dfe
DM
90/* Dump T to PP in language-independent form in quotes, for
91 debugging/logging/dumping purposes. */
757bf1df
DM
92
93void
808f4dfe 94dump_quoted_tree (pretty_printer *pp, tree t)
757bf1df 95{
808f4dfe
DM
96 pp_begin_quote (pp, pp_show_color (pp));
97 dump_tree (pp, t);
98 pp_end_quote (pp, pp_show_color (pp));
757bf1df
DM
99}
100
808f4dfe
DM
101/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
102 calls within other pp_printf calls.
757bf1df 103
808f4dfe
DM
104 default_tree_printer handles 'T' and some other codes by calling
105 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
106 dump_generic_node calls pp_printf in various places, leading to
107 garbled output.
757bf1df 108
808f4dfe
DM
109 Ideally pp_printf could be made to be reentrant, but in the meantime
110 this function provides a workaround. */
6969ac30
DM
111
112void
808f4dfe 113print_quoted_type (pretty_printer *pp, tree t)
6969ac30 114{
808f4dfe
DM
115 pp_begin_quote (pp, pp_show_color (pp));
116 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
117 pp_end_quote (pp, pp_show_color (pp));
6969ac30
DM
118}
119
d726a57b
DM
120/* class region_to_value_map. */
121
122/* Assignment operator for region_to_value_map. */
123
124region_to_value_map &
125region_to_value_map::operator= (const region_to_value_map &other)
126{
127 m_hash_map.empty ();
128 for (auto iter : other.m_hash_map)
129 {
130 const region *reg = iter.first;
131 const svalue *sval = iter.second;
132 m_hash_map.put (reg, sval);
133 }
134 return *this;
135}
136
137/* Equality operator for region_to_value_map. */
138
139bool
140region_to_value_map::operator== (const region_to_value_map &other) const
141{
142 if (m_hash_map.elements () != other.m_hash_map.elements ())
143 return false;
144
145 for (auto iter : *this)
146 {
147 const region *reg = iter.first;
148 const svalue *sval = iter.second;
149 const svalue * const *other_slot = other.get (reg);
150 if (other_slot == NULL)
151 return false;
152 if (sval != *other_slot)
153 return false;
154 }
155
156 return true;
157}
158
/* Dump this object to PP.
   SIMPLE controls how svalues and regions are printed;
   MULTILINE selects between one-entry-per-line and a brace-wrapped
   single-line form.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  /* Sort the regions so that the dump order is deterministic.  */
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
191
/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Match the colorization of the global diagnostic context.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
205
206
207/* Attempt to merge THIS with OTHER, writing the result
208 to OUT.
209
210 For now, write (region, value) mappings that are in common between THIS
211 and OTHER to OUT, effectively taking the intersection, rather than
212 rejecting differences. */
213
214bool
215region_to_value_map::can_merge_with_p (const region_to_value_map &other,
216 region_to_value_map *out) const
217{
218 for (auto iter : *this)
219 {
220 const region *iter_reg = iter.first;
221 const svalue *iter_sval = iter.second;
222 const svalue * const * other_slot = other.get (iter_reg);
223 if (other_slot)
224 if (iter_sval == *other_slot)
225 out->put (iter_reg, iter_sval);
226 }
227 return true;
228}
229
33255ad3
DM
230/* Purge any state involving SVAL. */
231
232void
233region_to_value_map::purge_state_involving (const svalue *sval)
234{
235 auto_vec<const region *> to_purge;
236 for (auto iter : *this)
237 {
238 const region *iter_reg = iter.first;
239 const svalue *iter_sval = iter.second;
240 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
241 to_purge.safe_push (iter_reg);
242 }
243 for (auto iter : to_purge)
244 m_hash_map.remove (iter);
245}
246
757bf1df
DM
247/* class region_model. */
248
808f4dfe 249/* Ctor for region_model: construct an "empty" model. */
757bf1df 250
808f4dfe 251region_model::region_model (region_model_manager *mgr)
9a2c9579
DM
252: m_mgr (mgr), m_store (), m_current_frame (NULL),
253 m_dynamic_extents ()
757bf1df 254{
808f4dfe 255 m_constraints = new constraint_manager (mgr);
757bf1df
DM
256}
257
/* region_model's copy ctor.
   Deep-copies the constraint_manager; the other members are copied
   by value.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
267
/* region_model's dtor.  Releases the owned constraint_manager.  */

region_model::~region_model ()
{
  delete m_constraints;
}
274
275/* region_model's assignment operator. */
276
277region_model &
278region_model::operator= (const region_model &other)
279{
808f4dfe
DM
280 /* m_mgr is const. */
281 gcc_assert (m_mgr == other.m_mgr);
757bf1df 282
808f4dfe 283 m_store = other.m_store;
757bf1df
DM
284
285 delete m_constraints;
808f4dfe 286 m_constraints = new constraint_manager (*other.m_constraints);
757bf1df 287
808f4dfe 288 m_current_frame = other.m_current_frame;
757bf1df 289
9a2c9579
DM
290 m_dynamic_extents = other.m_dynamic_extents;
291
757bf1df
DM
292 return *this;
293}
294
295/* Equality operator for region_model.
296
808f4dfe
DM
297 Amongst other things this directly compares the stores and the constraint
298 managers, so for this to be meaningful both this and OTHER should
757bf1df
DM
299 have been canonicalized. */
300
301bool
302region_model::operator== (const region_model &other) const
303{
808f4dfe
DM
304 /* We can only compare instances that use the same manager. */
305 gcc_assert (m_mgr == other.m_mgr);
757bf1df 306
808f4dfe 307 if (m_store != other.m_store)
757bf1df
DM
308 return false;
309
310 if (*m_constraints != *other.m_constraints)
311 return false;
312
808f4dfe
DM
313 if (m_current_frame != other.m_current_frame)
314 return false;
757bf1df 315
9a2c9579
DM
316 if (m_dynamic_extents != other.m_dynamic_extents)
317 return false;
318
757bf1df
DM
319 gcc_checking_assert (hash () == other.hash ());
320
321 return true;
322}
323
/* Generate a hash value for this region_model.
   Note: only the store and the constraints contribute to the hash;
   m_current_frame and m_dynamic_extents are not included.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}
333
808f4dfe
DM
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.
   MULTILINE selects between one-item-per-line and a compact
   brace-wrapped single-line form.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  /* Walk from the innermost frame outwards.  */
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
757bf1df 388
/* Dump a representation of this model to FILE.
   SIMPLE and MULTILINE are forwarded to dump_to_pp.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Match the colorization of the global diagnostic context.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}
402
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}
757bf1df 410
/* Dump a multiline representation of this model to stderr.
   Convenience entry point for use from a debugger.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}
418
e61ffa20
DM
/* Assert that this object is valid.  Currently delegates to the store.  */

void
region_model::validate () const
{
  m_store.validate ();
}
426
808f4dfe
DM
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
436
437/* Return true if this region_model is in canonical form. */
438
439bool
440region_model::canonicalized_p () const
441{
442 region_model copy (*this);
808f4dfe 443 copy.canonicalize ();
757bf1df
DM
444 return *this == copy;
445}
446
808f4dfe
DM
/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
454
757bf1df
DM
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR is the tree for the poisoned value being used; PKIND says how it
     became poisoned; SRC_REGION, if non-NULL, is where it came from.  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const FINAL OVERRIDE
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  /* Emit the warning appropriate to the kind of poisoning.  */
  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_of_uninitialized_value,
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_after_free,
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc,
	     OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) FINAL OVERRIDE
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;                   /* The poisoned value being used.  */
  enum poison_kind m_pkind;      /* How the value became poisoned.  */
  const region *m_src_region;    /* Source region, or NULL if unknown.  */
};
551
5e00ad3f
DM
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  /* ASSIGN is the shift statement; COUNT_CST is the (negative)
     constant shift count.  */
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, OPT_Wanalyzer_shift_count_negative,
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
  }

private:
  const gassign *m_assign;   /* The offending shift statement.  */
  tree m_count_cst;          /* The negative count constant.  */
};
589
/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  /* ASSIGN is the shift statement; OPERAND_PRECISION is the bit-precision
     of the shifted operand's type; COUNT_CST is the constant shift
     count (>= OPERAND_PRECISION).  */
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, OPT_Wanalyzer_shift_count_overflow,
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;     /* The offending shift statement.  */
  int m_operand_precision;     /* Precision of the shifted operand's type.  */
  tree m_count_cst;            /* The out-of-range count constant.  */
};
633
808f4dfe
DM
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Not a tree code we know how to model symbolically.  */
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn (new shift_count_negative_diagnostic
				  (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn (new shift_count_overflow_diagnostic
				  (assign, TYPE_PRECISION (TREE_TYPE (rhs1)),
				   rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
836
1e2fe671
DM
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR_IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       region_model_context *ctxt)
{
  gcc_assert (ctxt);
  const gimple *curr_stmt = ctxt->get_stmt ();
  if (curr_stmt == NULL)
    return false;

  /* We must have an assignment to a temporary of _Bool type.  */
  const gassign *assign_stmt = dyn_cast <const gassign *> (curr_stmt);
  if (!assign_stmt)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  /* An anonymous SSA name, i.e. a compiler-generated temporary.  */
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
938
33255ad3
DM
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   EXPR is the tree to report the use against (may need fixup for
   diagnostics).  No-op if CTXT is NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      /* Special case to avoid certain false positives.  */
      if (pkind == POISON_KIND_UNINIT
	  && within_short_circuited_stmt_p (this, ctxt))
	return sval;

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind,
						     src_region)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
990
00e7d024
DM
991/* Attempt to get a region for describing EXPR, the source of region of
992 a poisoned_svalue for use in a poisoned_value_diagnostic.
993 Return NULL if there is no good region to use. */
994
995const region *
996region_model::get_region_for_poisoned_expr (tree expr) const
997{
998 if (TREE_CODE (expr) == SSA_NAME)
999 {
1000 tree decl = SSA_NAME_VAR (expr);
1001 if (decl && DECL_P (decl))
1002 expr = decl;
1003 else
1004 return NULL;
1005 }
1006 return get_lvalue (expr, NULL);
1007}
1008
808f4dfe
DM
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  /* Fall back to special-casing the rhs codes that get_gassign_result
     doesn't model.  */
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	/* Unknown op: conservatively give the LHS an unknown value.  */
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
1091
33255ad3
DM
1092/* A pending_diagnostic subclass for implementing "__analyzer_dump_path". */
1093
1094class dump_path_diagnostic
1095 : public pending_diagnostic_subclass<dump_path_diagnostic>
1096{
1097public:
1098 bool emit (rich_location *richloc) FINAL OVERRIDE
1099 {
1100 inform (richloc, "path");
1101 return true;
1102 }
1103
1104 const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; }
1105
1106 bool operator== (const dump_path_diagnostic &) const
1107 {
1108 return true;
1109 }
1110};
1111
1112/* Handle the pre-sm-state part of STMT, modifying this object in-place.
1113 Write true to *OUT_TERMINATE_PATH if the path should be terminated.
1114 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1115 side effects. */
1116
1117void
1118region_model::on_stmt_pre (const gimple *stmt,
1119 bool *out_terminate_path,
1120 bool *out_unknown_side_effects,
1121 region_model_context *ctxt)
1122{
1123 switch (gimple_code (stmt))
1124 {
1125 default:
1126 /* No-op for now. */
1127 break;
1128
1129 case GIMPLE_ASSIGN:
1130 {
1131 const gassign *assign = as_a <const gassign *> (stmt);
1132 on_assignment (assign, ctxt);
1133 }
1134 break;
1135
1136 case GIMPLE_ASM:
ded2c2c0
DM
1137 {
1138 const gasm *asm_stmt = as_a <const gasm *> (stmt);
1139 on_asm_stmt (asm_stmt, ctxt);
1140 }
33255ad3
DM
1141 break;
1142
1143 case GIMPLE_CALL:
1144 {
1145 /* Track whether we have a gcall to a function that's not recognized by
1146 anything, for which we don't have a function body, or for which we
1147 don't know the fndecl. */
1148 const gcall *call = as_a <const gcall *> (stmt);
1149
1150 /* Debugging/test support. */
1151 if (is_special_named_call_p (call, "__analyzer_describe", 2))
1152 impl_call_analyzer_describe (call, ctxt);
1153 else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
1154 impl_call_analyzer_dump_capacity (call, ctxt);
4409152a
DM
1155 else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
1156 impl_call_analyzer_dump_escaped (call);
33255ad3
DM
1157 else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
1158 {
1159 /* Handle the builtin "__analyzer_dump_path" by queuing a
1160 diagnostic at this exploded_node. */
1161 ctxt->warn (new dump_path_diagnostic ());
1162 }
1163 else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
1164 0))
1165 {
1166 /* Handle the builtin "__analyzer_dump_region_model" by dumping
1167 the region model's state to stderr. */
1168 dump (false);
1169 }
1170 else if (is_special_named_call_p (call, "__analyzer_eval", 1))
1171 impl_call_analyzer_eval (call, ctxt);
1172 else if (is_special_named_call_p (call, "__analyzer_break", 0))
1173 {
1174 /* Handle the builtin "__analyzer_break" by triggering a
1175 breakpoint. */
1176 /* TODO: is there a good cross-platform way to do this? */
1177 raise (SIGINT);
1178 }
1179 else if (is_special_named_call_p (call,
1180 "__analyzer_dump_exploded_nodes",
1181 1))
1182 {
1183 /* This is handled elsewhere. */
1184 }
1185 else
1186 *out_unknown_side_effects = on_call_pre (call, ctxt,
1187 out_terminate_path);
1188 }
1189 break;
1190
1191 case GIMPLE_RETURN:
1192 {
1193 const greturn *return_ = as_a <const greturn *> (stmt);
1194 on_return (return_, ctxt);
1195 }
1196 break;
1197 }
1198}
1199
9ff3e236
DM
1200/* Ensure that all arguments at the call described by CD are checked
1201 for poisoned values, by calling get_rvalue on each argument. */
1202
1203void
1204region_model::check_call_args (const call_details &cd) const
1205{
1206 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1207 cd.get_arg_svalue (arg_idx);
1208}
1209
aee1adf2
DM
1210/* Return true if CD is known to be a call to a function with
1211 __attribute__((const)). */
1212
1213static bool
1214const_fn_p (const call_details &cd)
1215{
1216 tree fndecl = cd.get_fndecl_for_call ();
1217 if (!fndecl)
1218 return false;
1219 gcc_assert (DECL_P (fndecl));
1220 return TREE_READONLY (fndecl);
1221}
1222
1223/* If this CD is known to be a call to a function with
1224 __attribute__((const)), attempt to get a const_fn_result_svalue
1225 based on the arguments, or return NULL otherwise. */
1226
1227static const svalue *
1228maybe_get_const_fn_result (const call_details &cd)
1229{
1230 if (!const_fn_p (cd))
1231 return NULL;
1232
1233 unsigned num_args = cd.num_args ();
1234 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1235 /* Too many arguments. */
1236 return NULL;
1237
1238 auto_vec<const svalue *> inputs (num_args);
1239 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1240 {
1241 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1242 if (!arg_sval->can_have_associated_state_p ())
1243 return NULL;
1244 inputs.quick_push (arg_sval);
1245 }
1246
1247 region_model_manager *mgr = cd.get_manager ();
1248 const svalue *sval
1249 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1250 cd.get_fndecl_for_call (),
1251 inputs);
1252 return sval;
1253}
1254
757bf1df
DM
1255/* Update this model for the CALL stmt, using CTXT to report any
1256 diagnostics - the first half.
1257
1258 Updates to the region_model that should be made *before* sm-states
1259 are updated are done here; other updates to the region_model are done
ef7827b0 1260 in region_model::on_call_post.
757bf1df 1261
ef7827b0
DM
1262 Return true if the function call has unknown side effects (it wasn't
1263 recognized and we don't have a body for it, or are unable to tell which
5ee4ba03
DM
1264 fndecl it is).
1265
1266 Write true to *OUT_TERMINATE_PATH if this execution path should be
1267 terminated (e.g. the function call terminates the process). */
ef7827b0
DM
1268
1269bool
5ee4ba03
DM
1270region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
1271 bool *out_terminate_path)
757bf1df 1272{
48e8a7a6
DM
1273 call_details cd (call, this, ctxt);
1274
ef7827b0
DM
1275 bool unknown_side_effects = false;
1276
9b4eee5f
DM
1277 /* Special-case for IFN_DEFERRED_INIT.
1278 We want to report uninitialized variables with -fanalyzer (treating
1279 -ftrivial-auto-var-init= as purely a mitigation feature).
1280 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
1281 lhs of the call, so that it is still uninitialized from the point of
1282 view of the analyzer. */
1283 if (gimple_call_internal_p (call)
1284 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1285 return false;
1286
33255ad3
DM
1287 /* Some of the cases below update the lhs of the call based on the
1288 return value, but not all. Provide a default value, which may
1289 get overwritten below. */
1290 if (tree lhs = gimple_call_lhs (call))
1291 {
1292 const region *lhs_region = get_lvalue (lhs, ctxt);
aee1adf2
DM
1293 const svalue *sval = maybe_get_const_fn_result (cd);
1294 if (!sval)
1295 {
1296 /* For the common case of functions without __attribute__((const)),
1297 use a conjured value, and purge any prior state involving that
1298 value (in case this is in a loop). */
1299 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
1300 lhs_region);
1301 purge_state_involving (sval, ctxt);
1302 }
3a1d168e 1303 set_value (lhs_region, sval, ctxt);
33255ad3
DM
1304 }
1305
48e8a7a6 1306 if (gimple_call_internal_p (call))
757bf1df 1307 {
48e8a7a6
DM
1308 switch (gimple_call_internal_fn (call))
1309 {
1310 default:
1311 break;
1312 case IFN_BUILTIN_EXPECT:
b5081130
DM
1313 impl_call_builtin_expect (cd);
1314 return false;
37eb3ef4
DM
1315 case IFN_UBSAN_BOUNDS:
1316 return false;
48e8a7a6
DM
1317 }
1318 }
808f4dfe 1319
48e8a7a6
DM
1320 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1321 {
808f4dfe
DM
1322 /* The various impl_call_* member functions are implemented
1323 in region-model-impl-calls.cc.
1324 Having them split out into separate functions makes it easier
1325 to put breakpoints on the handling of specific functions. */
5fbcbcaf 1326 int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
ee7bfbe5 1327
47997a32 1328 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
ee7bfbe5
DM
1329 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1330 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1331 {
1332 default:
5fbcbcaf 1333 if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
b7028f06 1334 unknown_side_effects = true;
ee7bfbe5
DM
1335 break;
1336 case BUILT_IN_ALLOCA:
1337 case BUILT_IN_ALLOCA_WITH_ALIGN:
b5081130
DM
1338 impl_call_alloca (cd);
1339 return false;
ee7bfbe5 1340 case BUILT_IN_CALLOC:
b5081130
DM
1341 impl_call_calloc (cd);
1342 return false;
ee7bfbe5
DM
1343 case BUILT_IN_EXPECT:
1344 case BUILT_IN_EXPECT_WITH_PROBABILITY:
b5081130
DM
1345 impl_call_builtin_expect (cd);
1346 return false;
ee7bfbe5
DM
1347 case BUILT_IN_FREE:
1348 /* Handle in "on_call_post". */
1349 break;
1350 case BUILT_IN_MALLOC:
b5081130
DM
1351 impl_call_malloc (cd);
1352 return false;
b7028f06
DM
1353 case BUILT_IN_MEMCPY:
1354 case BUILT_IN_MEMCPY_CHK:
1355 impl_call_memcpy (cd);
1356 return false;
ee7bfbe5 1357 case BUILT_IN_MEMSET:
bc62bfb0 1358 case BUILT_IN_MEMSET_CHK:
ee7bfbe5
DM
1359 impl_call_memset (cd);
1360 return false;
1361 break;
a6baafca 1362 case BUILT_IN_REALLOC:
a6baafca 1363 return false;
111fd515
DM
1364 case BUILT_IN_STRCHR:
1365 impl_call_strchr (cd);
1366 return false;
b7028f06
DM
1367 case BUILT_IN_STRCPY:
1368 case BUILT_IN_STRCPY_CHK:
1369 impl_call_strcpy (cd);
1370 return false;
ee7bfbe5 1371 case BUILT_IN_STRLEN:
b5081130
DM
1372 impl_call_strlen (cd);
1373 return false;
b7028f06 1374
37eb3ef4
DM
1375 case BUILT_IN_STACK_SAVE:
1376 case BUILT_IN_STACK_RESTORE:
1377 return false;
1378
b7028f06
DM
1379 /* Stdio builtins. */
1380 case BUILT_IN_FPRINTF:
1381 case BUILT_IN_FPRINTF_UNLOCKED:
1382 case BUILT_IN_PUTC:
1383 case BUILT_IN_PUTC_UNLOCKED:
1384 case BUILT_IN_FPUTC:
1385 case BUILT_IN_FPUTC_UNLOCKED:
1386 case BUILT_IN_FPUTS:
1387 case BUILT_IN_FPUTS_UNLOCKED:
1388 case BUILT_IN_FWRITE:
1389 case BUILT_IN_FWRITE_UNLOCKED:
1390 case BUILT_IN_PRINTF:
1391 case BUILT_IN_PRINTF_UNLOCKED:
1392 case BUILT_IN_PUTCHAR:
1393 case BUILT_IN_PUTCHAR_UNLOCKED:
1394 case BUILT_IN_PUTS:
1395 case BUILT_IN_PUTS_UNLOCKED:
1396 case BUILT_IN_VFPRINTF:
1397 case BUILT_IN_VPRINTF:
1398 /* These stdio builtins have external effects that are out
1399 of scope for the analyzer: we only want to model the effects
1400 on the return value. */
9ff3e236 1401 check_call_args (cd);
b7028f06 1402 break;
ee7bfbe5 1403 }
ee7bfbe5 1404 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
b5081130
DM
1405 {
1406 impl_call_malloc (cd);
1407 return false;
1408 }
808f4dfe 1409 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
b5081130
DM
1410 {
1411 impl_call_calloc (cd);
1412 return false;
1413 }
ee7bfbe5 1414 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
b5081130
DM
1415 {
1416 impl_call_alloca (cd);
1417 return false;
1418 }
a6baafca
DM
1419 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
1420 {
1421 impl_call_realloc (cd);
1422 return false;
1423 }
5ee4ba03
DM
1424 else if (is_named_call_p (callee_fndecl, "error"))
1425 {
1426 if (impl_call_error (cd, 3, out_terminate_path))
1427 return false;
1428 else
1429 unknown_side_effects = true;
1430 }
1431 else if (is_named_call_p (callee_fndecl, "error_at_line"))
1432 {
1433 if (impl_call_error (cd, 5, out_terminate_path))
1434 return false;
1435 else
1436 unknown_side_effects = true;
1437 }
33255ad3
DM
1438 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
1439 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
1440 {
1441 impl_call_fgets (cd);
1442 return false;
1443 }
1444 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
1445 {
1446 impl_call_fread (cd);
1447 return false;
1448 }
e097c9ab
DM
1449 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
1450 {
1451 /* No side-effects (tracking stream state is out-of-scope
1452 for the analyzer). */
1453 }
1e19ecd7
DM
1454 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
1455 && POINTER_TYPE_P (cd.get_arg_type (0)))
e516294a 1456 {
808f4dfe 1457 impl_call_memset (cd);
e516294a
DM
1458 return false;
1459 }
111fd515
DM
1460 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
1461 && POINTER_TYPE_P (cd.get_arg_type (0)))
1462 {
1463 impl_call_strchr (cd);
1464 return false;
1465 }
1e19ecd7
DM
1466 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
1467 && POINTER_TYPE_P (cd.get_arg_type (0)))
757bf1df 1468 {
b5081130
DM
1469 impl_call_strlen (cd);
1470 return false;
757bf1df 1471 }
1690a839 1472 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
b5081130
DM
1473 {
1474 impl_call_operator_new (cd);
1475 return false;
1476 }
1690a839 1477 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
b5081130
DM
1478 {
1479 impl_call_operator_new (cd);
1480 return false;
1481 }
1690a839
DM
1482 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1483 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1484 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1485 {
1486 /* Handle in "on_call_post". */
1487 }
ef7827b0 1488 else if (!fndecl_has_gimple_body_p (callee_fndecl)
5fbcbcaf 1489 && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
808f4dfe 1490 && !fndecl_built_in_p (callee_fndecl))
ef7827b0 1491 unknown_side_effects = true;
757bf1df 1492 }
ef7827b0
DM
1493 else
1494 unknown_side_effects = true;
757bf1df 1495
ef7827b0 1496 return unknown_side_effects;
757bf1df
DM
1497}
1498
1499/* Update this model for the CALL stmt, using CTXT to report any
1500 diagnostics - the second half.
1501
1502 Updates to the region_model that should be made *after* sm-states
1503 are updated are done here; other updates to the region_model are done
ef7827b0
DM
1504 in region_model::on_call_pre.
1505
1506 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1507 to purge state. */
757bf1df
DM
1508
1509void
ef7827b0
DM
1510region_model::on_call_post (const gcall *call,
1511 bool unknown_side_effects,
1512 region_model_context *ctxt)
757bf1df 1513{
757bf1df 1514 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1690a839 1515 {
eafa9d96 1516 call_details cd (call, this, ctxt);
1690a839
DM
1517 if (is_named_call_p (callee_fndecl, "free", call, 1))
1518 {
1690a839
DM
1519 impl_call_free (cd);
1520 return;
1521 }
1522 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1523 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1524 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1525 {
1690a839
DM
1526 impl_call_operator_delete (cd);
1527 return;
1528 }
c7e276b8
DM
1529 /* Was this fndecl referenced by
1530 __attribute__((malloc(FOO)))? */
1531 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
1532 {
c7e276b8
DM
1533 impl_deallocation_call (cd);
1534 return;
1535 }
eafa9d96
DM
1536 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
1537 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1538 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1539 {
1540 default:
1541 break;
1542 case BUILT_IN_REALLOC:
1543 impl_call_realloc (cd);
1544 return;
1545 }
1690a839 1546 }
ef7827b0
DM
1547
1548 if (unknown_side_effects)
1549 handle_unrecognized_call (call, ctxt);
1550}
1551
33255ad3
DM
1552/* Purge state involving SVAL from this region_model, using CTXT
1553 (if non-NULL) to purge other state in a program_state.
1554
1555 For example, if we're at the def-stmt of an SSA name, then we need to
1556 purge any state for svalues that involve that SSA name. This avoids
1557 false positives in loops, since a symbolic value referring to the
1558 SSA name will be referring to the previous value of that SSA name.
1559
1560 For example, in:
1561 while ((e = hashmap_iter_next(&iter))) {
1562 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1563 free (e_strbuf->value);
1564 }
1565 at the def-stmt of e_8:
1566 e_8 = hashmap_iter_next (&iter);
1567 we should purge the "freed" state of:
1568 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1569 which is the "e_strbuf->value" value from the previous iteration,
1570 or we will erroneously report a double-free - the "e_8" within it
1571 refers to the previous value. */
1572
1573void
1574region_model::purge_state_involving (const svalue *sval,
1575 region_model_context *ctxt)
1576{
a113b143
DM
1577 if (!sval->can_have_associated_state_p ())
1578 return;
33255ad3
DM
1579 m_store.purge_state_involving (sval, m_mgr);
1580 m_constraints->purge_state_involving (sval);
1581 m_dynamic_extents.purge_state_involving (sval);
1582 if (ctxt)
1583 ctxt->purge_state_involving (sval);
1584}
1585
b6eaf90c
DM
1586/* Check CALL a call to external function CALLEE_FNDECL based on
1587 any __attribute__ ((access, ....) on the latter, complaining to
1588 CTXT about any issues.
1589
1590 Currently we merely call check_region_for_write on any regions
1591 pointed to by arguments marked with a "write_only" or "read_write"
1592 attribute. */
1593
1594void
1595region_model::
1596check_external_function_for_access_attr (const gcall *call,
1597 tree callee_fndecl,
1598 region_model_context *ctxt) const
1599{
1600 gcc_assert (call);
1601 gcc_assert (callee_fndecl);
1602 gcc_assert (ctxt);
1603
1604 tree fntype = TREE_TYPE (callee_fndecl);
1605 if (!fntype)
1606 return;
1607
1608 if (!TYPE_ATTRIBUTES (fntype))
1609 return;
1610
1611 /* Initialize a map of attribute access specifications for arguments
1612 to the function call. */
1613 rdwr_map rdwr_idx;
1614 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
1615
1616 unsigned argno = 0;
1617
1618 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
1619 iter = TREE_CHAIN (iter), ++argno)
1620 {
1621 const attr_access* access = rdwr_idx.get (argno);
1622 if (!access)
1623 continue;
1624
1625 /* Ignore any duplicate entry in the map for the size argument. */
1626 if (access->ptrarg != argno)
1627 continue;
1628
1629 if (access->mode == access_write_only
1630 || access->mode == access_read_write)
1631 {
1632 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
1633 const svalue *ptr_sval = get_rvalue (ptr_tree, ctxt);
1634 const region *reg = deref_rvalue (ptr_sval, ptr_tree, ctxt);
1635 check_region_for_write (reg, ctxt);
1636 /* We don't use the size arg for now. */
1637 }
1638 }
1639}
1640
ef7827b0
DM
1641/* Handle a call CALL to a function with unknown behavior.
1642
1643 Traverse the regions in this model, determining what regions are
1644 reachable from pointer arguments to CALL and from global variables,
1645 recursively.
1646
1647 Set all reachable regions to new unknown values and purge sm-state
1648 from their values, and from values that point to them. */
1649
1650void
1651region_model::handle_unrecognized_call (const gcall *call,
1652 region_model_context *ctxt)
1653{
1654 tree fndecl = get_fndecl_for_call (call, ctxt);
1655
b6eaf90c
DM
1656 if (fndecl && ctxt)
1657 check_external_function_for_access_attr (call, fndecl, ctxt);
1658
c710051a 1659 reachable_regions reachable_regs (this);
ef7827b0
DM
1660
1661 /* Determine the reachable regions and their mutability. */
1662 {
808f4dfe
DM
1663 /* Add globals and regions that already escaped in previous
1664 unknown calls. */
1665 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1666 &reachable_regs);
ef7827b0
DM
1667
1668 /* Params that are pointers. */
1669 tree iter_param_types = NULL_TREE;
1670 if (fndecl)
1671 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1672 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1673 {
1674 /* Track expected param type, where available. */
1675 tree param_type = NULL_TREE;
1676 if (iter_param_types)
1677 {
1678 param_type = TREE_VALUE (iter_param_types);
1679 gcc_assert (param_type);
1680 iter_param_types = TREE_CHAIN (iter_param_types);
1681 }
1682
1683 tree parm = gimple_call_arg (call, arg_idx);
808f4dfe
DM
1684 const svalue *parm_sval = get_rvalue (parm, ctxt);
1685 reachable_regs.handle_parm (parm_sval, param_type);
ef7827b0
DM
1686 }
1687 }
1688
33255ad3 1689 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
3a66c289 1690
808f4dfe
DM
1691 /* Purge sm-state for the svalues that were reachable,
1692 both in non-mutable and mutable form. */
1693 for (svalue_set::iterator iter
1694 = reachable_regs.begin_reachable_svals ();
1695 iter != reachable_regs.end_reachable_svals (); ++iter)
ef7827b0 1696 {
808f4dfe 1697 const svalue *sval = (*iter);
33255ad3
DM
1698 if (ctxt)
1699 ctxt->on_unknown_change (sval, false);
808f4dfe
DM
1700 }
1701 for (svalue_set::iterator iter
1702 = reachable_regs.begin_mutable_svals ();
1703 iter != reachable_regs.end_mutable_svals (); ++iter)
1704 {
1705 const svalue *sval = (*iter);
33255ad3
DM
1706 if (ctxt)
1707 ctxt->on_unknown_change (sval, true);
3a66c289
DM
1708 if (uncertainty)
1709 uncertainty->on_mutable_sval_at_unknown_call (sval);
808f4dfe 1710 }
ef7827b0 1711
808f4dfe 1712 /* Mark any clusters that have escaped. */
af66094d 1713 reachable_regs.mark_escaped_clusters (ctxt);
ef7827b0 1714
808f4dfe
DM
1715 /* Update bindings for all clusters that have escaped, whether above,
1716 or previously. */
1717 m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());
9a2c9579
DM
1718
1719 /* Purge dynamic extents from any regions that have escaped mutably:
1720 realloc could have been called on them. */
1721 for (hash_set<const region *>::iterator
1722 iter = reachable_regs.begin_mutable_base_regs ();
1723 iter != reachable_regs.end_mutable_base_regs ();
1724 ++iter)
1725 {
1726 const region *base_reg = (*iter);
1727 unset_dynamic_extents (base_reg);
1728 }
808f4dfe 1729}
ef7827b0 1730
808f4dfe
DM
1731/* Traverse the regions in this model, determining what regions are
1732 reachable from the store and populating *OUT.
ef7827b0 1733
808f4dfe
DM
1734 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1735 for reachability (for handling return values from functions when
1736 analyzing return of the only function on the stack).
1737
3a66c289
DM
1738 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1739 within it as being maybe-bound as additional "roots" for reachability.
1740
808f4dfe
DM
1741 Find svalues that haven't leaked. */
1742
1743void
1744region_model::get_reachable_svalues (svalue_set *out,
3a66c289
DM
1745 const svalue *extra_sval,
1746 const uncertainty_t *uncertainty)
808f4dfe 1747{
c710051a 1748 reachable_regions reachable_regs (this);
808f4dfe
DM
1749
1750 /* Add globals and regions that already escaped in previous
1751 unknown calls. */
1752 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1753 &reachable_regs);
1754
1755 if (extra_sval)
1756 reachable_regs.handle_sval (extra_sval);
ef7827b0 1757
3a66c289
DM
1758 if (uncertainty)
1759 for (uncertainty_t::iterator iter
1760 = uncertainty->begin_maybe_bound_svals ();
1761 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1762 reachable_regs.handle_sval (*iter);
1763
808f4dfe
DM
1764 /* Get regions for locals that have explicitly bound values. */
1765 for (store::cluster_map_t::iterator iter = m_store.begin ();
1766 iter != m_store.end (); ++iter)
1767 {
1768 const region *base_reg = (*iter).first;
1769 if (const region *parent = base_reg->get_parent_region ())
1770 if (parent->get_kind () == RK_FRAME)
1771 reachable_regs.add (base_reg, false);
1772 }
1773
1774 /* Populate *OUT based on the values that were reachable. */
1775 for (svalue_set::iterator iter
1776 = reachable_regs.begin_reachable_svals ();
1777 iter != reachable_regs.end_reachable_svals (); ++iter)
1778 out->add (*iter);
757bf1df
DM
1779}
1780
1781/* Update this model for the RETURN_STMT, using CTXT to report any
1782 diagnostics. */
1783
1784void
1785region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1786{
1787 tree callee = get_current_function ()->decl;
1788 tree lhs = DECL_RESULT (callee);
1789 tree rhs = gimple_return_retval (return_stmt);
1790
1791 if (lhs && rhs)
13ad6d9f
DM
1792 {
1793 const svalue *sval = get_rvalue (rhs, ctxt);
1794 const region *ret_reg = get_lvalue (lhs, ctxt);
1795 set_value (ret_reg, sval, ctxt);
1796 }
757bf1df
DM
1797}
1798
342e14ff
DM
1799/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1800 ENODE, using CTXT to report any diagnostics.
757bf1df 1801
342e14ff
DM
1802 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1803 0), as opposed to any second return due to longjmp/sigsetjmp. */
757bf1df
DM
1804
1805void
1806region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1807 region_model_context *ctxt)
1808{
808f4dfe
DM
1809 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1810 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1811 ctxt);
757bf1df 1812
808f4dfe
DM
1813 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1814 region. */
1815 if (buf_reg)
757bf1df 1816 {
fd9982bb 1817 setjmp_record r (enode, call);
808f4dfe
DM
1818 const svalue *sval
1819 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1820 set_value (buf_reg, sval, ctxt);
757bf1df
DM
1821 }
1822
1823 /* Direct calls to setjmp return 0. */
1824 if (tree lhs = gimple_call_lhs (call))
1825 {
1aff29d4
DM
1826 const svalue *new_sval
1827 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
808f4dfe
DM
1828 const region *lhs_reg = get_lvalue (lhs, ctxt);
1829 set_value (lhs_reg, new_sval, ctxt);
757bf1df
DM
1830 }
1831}
1832
1833/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1834 to a "setjmp" at SETJMP_CALL where the final stack depth should be
808f4dfe
DM
1835 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1836 done, and should be done by the caller. */
757bf1df
DM
1837
1838void
1839region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
808f4dfe 1840 int setjmp_stack_depth, region_model_context *ctxt)
757bf1df
DM
1841{
1842 /* Evaluate the val, using the frame of the "longjmp". */
1843 tree fake_retval = gimple_call_arg (longjmp_call, 1);
808f4dfe 1844 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
757bf1df
DM
1845
1846 /* Pop any frames until we reach the stack depth of the function where
1847 setjmp was called. */
1848 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1849 while (get_stack_depth () > setjmp_stack_depth)
808f4dfe 1850 pop_frame (NULL, NULL, ctxt);
757bf1df
DM
1851
1852 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1853
1854 /* Assign to LHS of "setjmp" in new_state. */
1855 if (tree lhs = gimple_call_lhs (setjmp_call))
1856 {
1857 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1aff29d4
DM
1858 const svalue *zero_sval
1859 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
808f4dfe 1860 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
757bf1df
DM
1861 /* If we have 0, use 1. */
1862 if (eq_zero.is_true ())
1863 {
808f4dfe 1864 const svalue *one_sval
1aff29d4 1865 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
808f4dfe 1866 fake_retval_sval = one_sval;
757bf1df
DM
1867 }
1868 else
1869 {
1870 /* Otherwise note that the value is nonzero. */
808f4dfe 1871 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
757bf1df
DM
1872 }
1873
808f4dfe
DM
1874 /* Decorate the return value from setjmp as being unmergeable,
1875 so that we don't attempt to merge states with it as zero
1876 with states in which it's nonzero, leading to a clean distinction
1877 in the exploded_graph betweeen the first return and the second
1878 return. */
1879 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
757bf1df 1880
808f4dfe
DM
1881 const region *lhs_reg = get_lvalue (lhs, ctxt);
1882 set_value (lhs_reg, fake_retval_sval, ctxt);
1883 }
757bf1df
DM
1884}
1885
1886/* Update this region_model for a phi stmt of the form
1887 LHS = PHI <...RHS...>.
e0a7a675
DM
1888 where RHS is for the appropriate edge.
1889 Get state from OLD_STATE so that all of the phi stmts for a basic block
1890 are effectively handled simultaneously. */
757bf1df
DM
1891
1892void
8525d1f5 1893region_model::handle_phi (const gphi *phi,
808f4dfe 1894 tree lhs, tree rhs,
e0a7a675 1895 const region_model &old_state,
757bf1df
DM
1896 region_model_context *ctxt)
1897{
1898 /* For now, don't bother tracking the .MEM SSA names. */
1899 if (tree var = SSA_NAME_VAR (lhs))
1900 if (TREE_CODE (var) == VAR_DECL)
1901 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
1902 return;
1903
e0a7a675
DM
1904 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1905 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
757bf1df 1906
e0a7a675 1907 set_value (dst_reg, src_sval, ctxt);
8525d1f5
DM
1908
1909 if (ctxt)
1910 ctxt->on_phi (phi, rhs);
757bf1df
DM
1911}
1912
1913/* Implementation of region_model::get_lvalue; the latter adds type-checking.
1914
1915 Get the id of the region for PV within this region_model,
1916 emitting any diagnostics to CTXT. */
1917
808f4dfe 1918const region *
53cb324c 1919region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1920{
1921 tree expr = pv.m_tree;
1922
1923 gcc_assert (expr);
1924
1925 switch (TREE_CODE (expr))
1926 {
1927 default:
808f4dfe
DM
1928 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
1929 dump_location_t ());
757bf1df
DM
1930
1931 case ARRAY_REF:
1932 {
1933 tree array = TREE_OPERAND (expr, 0);
1934 tree index = TREE_OPERAND (expr, 1);
757bf1df 1935
808f4dfe
DM
1936 const region *array_reg = get_lvalue (array, ctxt);
1937 const svalue *index_sval = get_rvalue (index, ctxt);
1938 return m_mgr->get_element_region (array_reg,
1939 TREE_TYPE (TREE_TYPE (array)),
1940 index_sval);
757bf1df
DM
1941 }
1942 break;
1943
93e759fc
DM
1944 case BIT_FIELD_REF:
1945 {
1946 tree inner_expr = TREE_OPERAND (expr, 0);
1947 const region *inner_reg = get_lvalue (inner_expr, ctxt);
1948 tree num_bits = TREE_OPERAND (expr, 1);
1949 tree first_bit_offset = TREE_OPERAND (expr, 2);
1950 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
1951 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
1952 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
1953 TREE_INT_CST_LOW (num_bits));
1954 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
1955 }
1956 break;
1957
757bf1df
DM
1958 case MEM_REF:
1959 {
1960 tree ptr = TREE_OPERAND (expr, 0);
1961 tree offset = TREE_OPERAND (expr, 1);
808f4dfe
DM
1962 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1963 const svalue *offset_sval = get_rvalue (offset, ctxt);
1964 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
1965 return m_mgr->get_offset_region (star_ptr,
1966 TREE_TYPE (expr),
1967 offset_sval);
757bf1df
DM
1968 }
1969 break;
1970
808f4dfe
DM
1971 case FUNCTION_DECL:
1972 return m_mgr->get_region_for_fndecl (expr);
1973
1974 case LABEL_DECL:
1975 return m_mgr->get_region_for_label (expr);
1976
757bf1df
DM
1977 case VAR_DECL:
1978 /* Handle globals. */
1979 if (is_global_var (expr))
808f4dfe 1980 return m_mgr->get_region_for_global (expr);
757bf1df
DM
1981
1982 /* Fall through. */
1983
1984 case SSA_NAME:
1985 case PARM_DECL:
1986 case RESULT_DECL:
1987 {
1988 gcc_assert (TREE_CODE (expr) == SSA_NAME
1989 || TREE_CODE (expr) == PARM_DECL
1990 || TREE_CODE (expr) == VAR_DECL
1991 || TREE_CODE (expr) == RESULT_DECL);
1992
808f4dfe
DM
1993 int stack_index = pv.m_stack_depth;
1994 const frame_region *frame = get_frame_at_index (stack_index);
757bf1df 1995 gcc_assert (frame);
808f4dfe 1996 return frame->get_region_for_local (m_mgr, expr);
757bf1df
DM
1997 }
1998
1999 case COMPONENT_REF:
2000 {
2001 /* obj.field */
2002 tree obj = TREE_OPERAND (expr, 0);
2003 tree field = TREE_OPERAND (expr, 1);
808f4dfe
DM
2004 const region *obj_reg = get_lvalue (obj, ctxt);
2005 return m_mgr->get_field_region (obj_reg, field);
41a9e940
DM
2006 }
2007 break;
2008
757bf1df 2009 case STRING_CST:
808f4dfe 2010 return m_mgr->get_region_for_string (expr);
757bf1df
DM
2011 }
2012}
2013
2014/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2015
09bea584
DM
2016static void
2017assert_compat_types (tree src_type, tree dst_type)
2018{
2019 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
2020 {
2021#if CHECKING_P
2022 if (!(useless_type_conversion_p (src_type, dst_type)))
2023 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2024#endif
2025 }
09bea584 2026}
757bf1df 2027
ea4e3218
DM
2028/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2029
e66b9f67 2030bool
ea4e3218
DM
2031compat_types_p (tree src_type, tree dst_type)
2032{
2033 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2034 if (!(useless_type_conversion_p (src_type, dst_type)))
2035 return false;
2036 return true;
2037}
2038
808f4dfe 2039/* Get the region for PV within this region_model,
757bf1df
DM
2040 emitting any diagnostics to CTXT. */
2041
808f4dfe 2042const region *
53cb324c 2043region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2044{
2045 if (pv.m_tree == NULL_TREE)
808f4dfe 2046 return NULL;
757bf1df 2047
808f4dfe
DM
2048 const region *result_reg = get_lvalue_1 (pv, ctxt);
2049 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2050 return result_reg;
757bf1df
DM
2051}
2052
808f4dfe 2053/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
2054 recent stack frame if it's a local). */
2055
808f4dfe 2056const region *
53cb324c 2057region_model::get_lvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
2058{
2059 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2060}
2061
2062/* Implementation of region_model::get_rvalue; the latter adds type-checking.
2063
2064 Get the value of PV within this region_model,
2065 emitting any diagnostics to CTXT. */
2066
808f4dfe 2067const svalue *
53cb324c 2068region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2069{
2070 gcc_assert (pv.m_tree);
2071
2072 switch (TREE_CODE (pv.m_tree))
2073 {
2074 default:
2242b975 2075 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
757bf1df
DM
2076
2077 case ADDR_EXPR:
2078 {
2079 /* "&EXPR". */
2080 tree expr = pv.m_tree;
2081 tree op0 = TREE_OPERAND (expr, 0);
808f4dfe
DM
2082 const region *expr_reg = get_lvalue (op0, ctxt);
2083 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
757bf1df
DM
2084 }
2085 break;
2086
808f4dfe 2087 case BIT_FIELD_REF:
d3b1ef7a
DM
2088 {
2089 tree expr = pv.m_tree;
2090 tree op0 = TREE_OPERAND (expr, 0);
2091 const region *reg = get_lvalue (op0, ctxt);
2092 tree num_bits = TREE_OPERAND (expr, 1);
2093 tree first_bit_offset = TREE_OPERAND (expr, 2);
2094 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2095 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2096 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2097 TREE_INT_CST_LOW (num_bits));
9faf8348 2098 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
d3b1ef7a 2099 }
808f4dfe
DM
2100
2101 case SSA_NAME:
2102 case VAR_DECL:
2103 case PARM_DECL:
2104 case RESULT_DECL:
757bf1df
DM
2105 case ARRAY_REF:
2106 {
da7c2773 2107 const region *reg = get_lvalue (pv, ctxt);
9faf8348 2108 return get_store_value (reg, ctxt);
757bf1df
DM
2109 }
2110
808f4dfe
DM
2111 case REALPART_EXPR:
2112 case IMAGPART_EXPR:
2113 case VIEW_CONVERT_EXPR:
2114 {
2115 tree expr = pv.m_tree;
2116 tree arg = TREE_OPERAND (expr, 0);
2117 const svalue *arg_sval = get_rvalue (arg, ctxt);
2118 const svalue *sval_unaryop
2119 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2120 arg_sval);
2121 return sval_unaryop;
2122 };
2123
757bf1df
DM
2124 case INTEGER_CST:
2125 case REAL_CST:
808f4dfe
DM
2126 case COMPLEX_CST:
2127 case VECTOR_CST:
757bf1df 2128 case STRING_CST:
808f4dfe
DM
2129 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2130
2131 case POINTER_PLUS_EXPR:
2132 {
2133 tree expr = pv.m_tree;
2134 tree ptr = TREE_OPERAND (expr, 0);
2135 tree offset = TREE_OPERAND (expr, 1);
2136 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2137 const svalue *offset_sval = get_rvalue (offset, ctxt);
2138 const svalue *sval_binop
2139 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2140 ptr_sval, offset_sval);
2141 return sval_binop;
2142 }
2143
2144 /* Binary ops. */
2145 case PLUS_EXPR:
2146 case MULT_EXPR:
2147 {
2148 tree expr = pv.m_tree;
2149 tree arg0 = TREE_OPERAND (expr, 0);
2150 tree arg1 = TREE_OPERAND (expr, 1);
2151 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2152 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2153 const svalue *sval_binop
2154 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2155 arg0_sval, arg1_sval);
2156 return sval_binop;
2157 }
757bf1df
DM
2158
2159 case COMPONENT_REF:
2160 case MEM_REF:
757bf1df 2161 {
808f4dfe 2162 const region *ref_reg = get_lvalue (pv, ctxt);
9faf8348 2163 return get_store_value (ref_reg, ctxt);
757bf1df 2164 }
1b342485
AS
2165 case OBJ_TYPE_REF:
2166 {
2167 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2168 return get_rvalue (expr, ctxt);
2169 }
757bf1df
DM
2170 }
2171}
2172
2173/* Get the value of PV within this region_model,
2174 emitting any diagnostics to CTXT. */
2175
808f4dfe 2176const svalue *
53cb324c 2177region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2178{
2179 if (pv.m_tree == NULL_TREE)
808f4dfe 2180 return NULL;
757bf1df 2181
808f4dfe 2182 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 2183
808f4dfe
DM
2184 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2185
33255ad3
DM
2186 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
2187
808f4dfe 2188 return result_sval;
757bf1df
DM
2189}
2190
2191/* Get the value of EXPR within this region_model (assuming the most
2192 recent stack frame if it's a local). */
2193
808f4dfe 2194const svalue *
53cb324c 2195region_model::get_rvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
2196{
2197 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2198}
2199
623bc027
DM
2200/* Return true if this model is on a path with "main" as the entrypoint
2201 (as opposed to one in which we're merely analyzing a subset of the
2202 path through the code). */
2203
2204bool
2205region_model::called_from_main_p () const
2206{
2207 if (!m_current_frame)
2208 return false;
2209 /* Determine if the oldest stack frame in this model is for "main". */
2210 const frame_region *frame0 = get_frame_at_index (0);
2211 gcc_assert (frame0);
2212 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2213}
2214
2215/* Subroutine of region_model::get_store_value for when REG is (or is within)
2216 a global variable that hasn't been touched since the start of this path
2217 (or was implicitly touched due to a call to an unknown function). */
2218
2219const svalue *
2220region_model::get_initial_value_for_global (const region *reg) const
2221{
2222 /* Get the decl that REG is for (or is within). */
2223 const decl_region *base_reg
2224 = reg->get_base_region ()->dyn_cast_decl_region ();
2225 gcc_assert (base_reg);
2226 tree decl = base_reg->get_decl ();
2227
2228 /* Special-case: to avoid having to explicitly update all previously
2229 untracked globals when calling an unknown fn, they implicitly have
2230 an unknown value if an unknown call has occurred, unless this is
2231 static to-this-TU and hasn't escaped. Globals that have escaped
2232 are explicitly tracked, so we shouldn't hit this case for them. */
af66094d
DM
2233 if (m_store.called_unknown_fn_p ()
2234 && TREE_PUBLIC (decl)
2235 && !TREE_READONLY (decl))
623bc027
DM
2236 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2237
2238 /* If we are on a path from the entrypoint from "main" and we have a
2239 global decl defined in this TU that hasn't been touched yet, then
2240 the initial value of REG can be taken from the initialization value
2241 of the decl. */
16ad9ae8 2242 if (called_from_main_p () || TREE_READONLY (decl))
623bc027 2243 {
61a43de5
DM
2244 /* Attempt to get the initializer value for base_reg. */
2245 if (const svalue *base_reg_init
2246 = base_reg->get_svalue_for_initializer (m_mgr))
623bc027 2247 {
61a43de5
DM
2248 if (reg == base_reg)
2249 return base_reg_init;
2250 else
623bc027 2251 {
61a43de5
DM
2252 /* Get the value for REG within base_reg_init. */
2253 binding_cluster c (base_reg);
e61ffa20 2254 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
61a43de5
DM
2255 const svalue *sval
2256 = c.get_any_binding (m_mgr->get_store_manager (), reg);
2257 if (sval)
2258 {
2259 if (reg->get_type ())
2260 sval = m_mgr->get_or_create_cast (reg->get_type (),
2261 sval);
2262 return sval;
2263 }
623bc027
DM
2264 }
2265 }
2266 }
2267
2268 /* Otherwise, return INIT_VAL(REG). */
2269 return m_mgr->get_or_create_initial_value (reg);
2270}
2271
808f4dfe 2272/* Get a value for REG, looking it up in the store, or otherwise falling
9faf8348
DM
2273 back to "initial" or "unknown" values.
2274 Use CTXT to report any warnings associated with reading from REG. */
757bf1df 2275
808f4dfe 2276const svalue *
9faf8348
DM
2277region_model::get_store_value (const region *reg,
2278 region_model_context *ctxt) const
757bf1df 2279{
9faf8348
DM
2280 check_region_for_read (reg, ctxt);
2281
2867118d
DM
2282 /* Special-case: handle var_decls in the constant pool. */
2283 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2284 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2285 return sval;
2286
808f4dfe
DM
2287 const svalue *sval
2288 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2289 if (sval)
757bf1df 2290 {
808f4dfe
DM
2291 if (reg->get_type ())
2292 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2293 return sval;
757bf1df 2294 }
757bf1df 2295
808f4dfe
DM
2296 /* Special-case: read at a constant index within a STRING_CST. */
2297 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2298 if (tree byte_offset_cst
2299 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2300 if (const string_region *str_reg
2301 = reg->get_parent_region ()->dyn_cast_string_region ())
757bf1df 2302 {
808f4dfe
DM
2303 tree string_cst = str_reg->get_string_cst ();
2304 if (const svalue *char_sval
2305 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2306 byte_offset_cst))
2307 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
757bf1df 2308 }
757bf1df 2309
808f4dfe
DM
2310 /* Special-case: read the initial char of a STRING_CST. */
2311 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2312 if (const string_region *str_reg
2313 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2314 {
2315 tree string_cst = str_reg->get_string_cst ();
2316 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2317 if (const svalue *char_sval
2318 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2319 byte_offset_cst))
2320 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2321 }
757bf1df 2322
808f4dfe
DM
2323 /* Otherwise we implicitly have the initial value of the region
2324 (if the cluster had been touched, binding_cluster::get_any_binding,
2325 would have returned UNKNOWN, and we would already have returned
2326 that above). */
757bf1df 2327
623bc027
DM
2328 /* Handle globals. */
2329 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2330 == RK_GLOBALS)
2331 return get_initial_value_for_global (reg);
757bf1df 2332
808f4dfe 2333 return m_mgr->get_or_create_initial_value (reg);
757bf1df
DM
2334}
2335
808f4dfe
DM
2336/* Return false if REG does not exist, true if it may do.
2337 This is for detecting regions within the stack that don't exist anymore
2338 after frames are popped. */
757bf1df 2339
808f4dfe
DM
2340bool
2341region_model::region_exists_p (const region *reg) const
757bf1df 2342{
808f4dfe
DM
2343 /* If within a stack frame, check that the stack frame is live. */
2344 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 2345 {
808f4dfe
DM
2346 /* Check that the current frame is the enclosing frame, or is called
2347 by it. */
2348 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2349 iter_frame = iter_frame->get_calling_frame ())
2350 if (iter_frame == enclosing_frame)
2351 return true;
2352 return false;
757bf1df 2353 }
808f4dfe
DM
2354
2355 return true;
757bf1df
DM
2356}
2357
808f4dfe
DM
2358/* Get a region for referencing PTR_SVAL, creating a region if need be, and
2359 potentially generating warnings via CTXT.
35e3f082 2360 PTR_SVAL must be of pointer type.
808f4dfe 2361 PTR_TREE if non-NULL can be used when emitting diagnostics. */
757bf1df 2362
808f4dfe
DM
2363const region *
2364region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
53cb324c 2365 region_model_context *ctxt) const
757bf1df 2366{
808f4dfe 2367 gcc_assert (ptr_sval);
35e3f082 2368 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
757bf1df 2369
49bfbf18
DM
2370 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2371 as a constraint. This suppresses false positives from
2372 -Wanalyzer-null-dereference for the case where we later have an
2373 if (PTR_SVAL) that would occur if we considered the false branch
2374 and transitioned the malloc state machine from start->null. */
2375 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2376 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2377 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2378
808f4dfe 2379 switch (ptr_sval->get_kind ())
757bf1df 2380 {
808f4dfe 2381 default:
23ebfda0 2382 break;
808f4dfe 2383
757bf1df
DM
2384 case SK_REGION:
2385 {
808f4dfe
DM
2386 const region_svalue *region_sval
2387 = as_a <const region_svalue *> (ptr_sval);
757bf1df
DM
2388 return region_sval->get_pointee ();
2389 }
2390
808f4dfe
DM
2391 case SK_BINOP:
2392 {
2393 const binop_svalue *binop_sval
2394 = as_a <const binop_svalue *> (ptr_sval);
2395 switch (binop_sval->get_op ())
2396 {
2397 case POINTER_PLUS_EXPR:
2398 {
2399 /* If we have a symbolic value expressing pointer arithmentic,
2400 try to convert it to a suitable region. */
2401 const region *parent_region
2402 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2403 const svalue *offset = binop_sval->get_arg1 ();
2404 tree type= TREE_TYPE (ptr_sval->get_type ());
2405 return m_mgr->get_offset_region (parent_region, type, offset);
2406 }
2407 default:
23ebfda0 2408 break;
808f4dfe
DM
2409 }
2410 }
23ebfda0 2411 break;
757bf1df
DM
2412
2413 case SK_POISONED:
2414 {
2415 if (ctxt)
808f4dfe
DM
2416 {
2417 tree ptr = get_representative_tree (ptr_sval);
2418 /* If we can't get a representative tree for PTR_SVAL
2419 (e.g. if it hasn't been bound into the store), then
2420 fall back on PTR_TREE, if non-NULL. */
2421 if (!ptr)
2422 ptr = ptr_tree;
2423 if (ptr)
2424 {
2425 const poisoned_svalue *poisoned_sval
2426 = as_a <const poisoned_svalue *> (ptr_sval);
2427 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
00e7d024 2428 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind, NULL));
808f4dfe
DM
2429 }
2430 }
757bf1df 2431 }
23ebfda0 2432 break;
757bf1df
DM
2433 }
2434
23ebfda0 2435 return m_mgr->get_symbolic_region (ptr_sval);
757bf1df
DM
2436}
2437
d3b1ef7a
DM
2438/* Attempt to get BITS within any value of REG, as TYPE.
2439 In particular, extract values from compound_svalues for the case
2440 where there's a concrete binding at BITS.
9faf8348
DM
2441 Return an unknown svalue if we can't handle the given case.
2442 Use CTXT to report any warnings associated with reading from REG. */
d3b1ef7a
DM
2443
2444const svalue *
2445region_model::get_rvalue_for_bits (tree type,
2446 const region *reg,
9faf8348
DM
2447 const bit_range &bits,
2448 region_model_context *ctxt) const
d3b1ef7a 2449{
9faf8348 2450 const svalue *sval = get_store_value (reg, ctxt);
e61ffa20 2451 return m_mgr->get_or_create_bits_within (type, bits, sval);
d3b1ef7a
DM
2452}
2453
3175d40f
DM
2454/* A subclass of pending_diagnostic for complaining about writes to
2455 constant regions of memory. */
2456
2457class write_to_const_diagnostic
2458: public pending_diagnostic_subclass<write_to_const_diagnostic>
2459{
2460public:
2461 write_to_const_diagnostic (const region *reg, tree decl)
2462 : m_reg (reg), m_decl (decl)
2463 {}
2464
2465 const char *get_kind () const FINAL OVERRIDE
2466 {
2467 return "write_to_const_diagnostic";
2468 }
2469
2470 bool operator== (const write_to_const_diagnostic &other) const
2471 {
2472 return (m_reg == other.m_reg
2473 && m_decl == other.m_decl);
2474 }
2475
2476 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2477 {
111fd515
DM
2478 auto_diagnostic_group d;
2479 bool warned;
2480 switch (m_reg->get_kind ())
2481 {
2482 default:
2483 warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2484 "write to %<const%> object %qE", m_decl);
2485 break;
2486 case RK_FUNCTION:
2487 warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2488 "write to function %qE", m_decl);
2489 break;
2490 case RK_LABEL:
2491 warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
2492 "write to label %qE", m_decl);
2493 break;
2494 }
3175d40f
DM
2495 if (warned)
2496 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2497 return warned;
2498 }
2499
2500 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2501 {
111fd515
DM
2502 switch (m_reg->get_kind ())
2503 {
2504 default:
2505 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2506 case RK_FUNCTION:
2507 return ev.formatted_print ("write to function %qE here", m_decl);
2508 case RK_LABEL:
2509 return ev.formatted_print ("write to label %qE here", m_decl);
2510 }
3175d40f
DM
2511 }
2512
2513private:
2514 const region *m_reg;
2515 tree m_decl;
2516};
2517
2518/* A subclass of pending_diagnostic for complaining about writes to
2519 string literals. */
2520
2521class write_to_string_literal_diagnostic
2522: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2523{
2524public:
2525 write_to_string_literal_diagnostic (const region *reg)
2526 : m_reg (reg)
2527 {}
2528
2529 const char *get_kind () const FINAL OVERRIDE
2530 {
2531 return "write_to_string_literal_diagnostic";
2532 }
2533
2534 bool operator== (const write_to_string_literal_diagnostic &other) const
2535 {
2536 return m_reg == other.m_reg;
2537 }
2538
2539 bool emit (rich_location *rich_loc) FINAL OVERRIDE
2540 {
2541 return warning_at (rich_loc, OPT_Wanalyzer_write_to_string_literal,
2542 "write to string literal");
2543 /* Ideally we would show the location of the STRING_CST as well,
2544 but it is not available at this point. */
2545 }
2546
2547 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
2548 {
2549 return ev.formatted_print ("write to string literal here");
2550 }
2551
2552private:
2553 const region *m_reg;
2554};
2555
2556/* Use CTXT to warn If DEST_REG is a region that shouldn't be written to. */
2557
2558void
2559region_model::check_for_writable_region (const region* dest_reg,
2560 region_model_context *ctxt) const
2561{
2562 /* Fail gracefully if CTXT is NULL. */
2563 if (!ctxt)
2564 return;
2565
2566 const region *base_reg = dest_reg->get_base_region ();
2567 switch (base_reg->get_kind ())
2568 {
2569 default:
2570 break;
111fd515
DM
2571 case RK_FUNCTION:
2572 {
2573 const function_region *func_reg = as_a <const function_region *> (base_reg);
2574 tree fndecl = func_reg->get_fndecl ();
2575 ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl));
2576 }
2577 break;
2578 case RK_LABEL:
2579 {
2580 const label_region *label_reg = as_a <const label_region *> (base_reg);
2581 tree label = label_reg->get_label ();
2582 ctxt->warn (new write_to_const_diagnostic (label_reg, label));
2583 }
2584 break;
3175d40f
DM
2585 case RK_DECL:
2586 {
2587 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2588 tree decl = decl_reg->get_decl ();
2589 /* Warn about writes to const globals.
2590 Don't warn for writes to const locals, and params in particular,
2591 since we would warn in push_frame when setting them up (e.g the
2592 "this" param is "T* const"). */
2593 if (TREE_READONLY (decl)
2594 && is_global_var (decl))
2595 ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
2596 }
2597 break;
2598 case RK_STRING:
2599 ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
2600 break;
2601 }
2602}
2603
9a2c9579
DM
2604/* Get the capacity of REG in bytes. */
2605
2606const svalue *
2607region_model::get_capacity (const region *reg) const
2608{
2609 switch (reg->get_kind ())
2610 {
2611 default:
2612 break;
2613 case RK_DECL:
2614 {
2615 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2616 tree decl = decl_reg->get_decl ();
2617 if (TREE_CODE (decl) == SSA_NAME)
2618 {
2619 tree type = TREE_TYPE (decl);
2620 tree size = TYPE_SIZE (type);
2621 return get_rvalue (size, NULL);
2622 }
2623 else
2624 {
2625 tree size = decl_init_size (decl, false);
2626 if (size)
2627 return get_rvalue (size, NULL);
2628 }
2629 }
2630 break;
e61ffa20
DM
2631 case RK_SIZED:
2632 /* Look through sized regions to get at the capacity
2633 of the underlying regions. */
2634 return get_capacity (reg->get_parent_region ());
9a2c9579
DM
2635 }
2636
2637 if (const svalue *recorded = get_dynamic_extents (reg))
2638 return recorded;
2639
2640 return m_mgr->get_or_create_unknown_svalue (sizetype);
2641}
2642
9faf8348
DM
2643/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2644 using DIR to determine if this access is a read or write. */
2645
2646void
2647region_model::check_region_access (const region *reg,
2648 enum access_direction dir,
2649 region_model_context *ctxt) const
2650{
2651 /* Fail gracefully if CTXT is NULL. */
2652 if (!ctxt)
2653 return;
2654
b9365b93
DM
2655 check_region_for_taint (reg, dir, ctxt);
2656
9faf8348
DM
2657 switch (dir)
2658 {
2659 default:
2660 gcc_unreachable ();
2661 case DIR_READ:
2662 /* Currently a no-op. */
2663 break;
2664 case DIR_WRITE:
2665 check_for_writable_region (reg, ctxt);
2666 break;
2667 }
2668}
2669
2670/* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2671
2672void
2673region_model::check_region_for_write (const region *dest_reg,
2674 region_model_context *ctxt) const
2675{
2676 check_region_access (dest_reg, DIR_WRITE, ctxt);
2677}
2678
2679/* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2680
2681void
2682region_model::check_region_for_read (const region *src_reg,
2683 region_model_context *ctxt) const
2684{
2685 check_region_access (src_reg, DIR_READ, ctxt);
2686}
2687
808f4dfe 2688/* Set the value of the region given by LHS_REG to the value given
9faf8348
DM
2689 by RHS_SVAL.
2690 Use CTXT to report any warnings associated with writing to LHS_REG. */
757bf1df 2691
808f4dfe
DM
2692void
2693region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3175d40f 2694 region_model_context *ctxt)
757bf1df 2695{
808f4dfe
DM
2696 gcc_assert (lhs_reg);
2697 gcc_assert (rhs_sval);
2698
9faf8348 2699 check_region_for_write (lhs_reg, ctxt);
3175d40f 2700
808f4dfe 2701 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
e61ffa20 2702 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
2703}
2704
808f4dfe 2705/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
2706
2707void
808f4dfe 2708region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 2709{
808f4dfe
DM
2710 const region *lhs_reg = get_lvalue (lhs, ctxt);
2711 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
2712 gcc_assert (lhs_reg);
2713 gcc_assert (rhs_sval);
2714 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
2715}
2716
808f4dfe 2717/* Remove all bindings overlapping REG within the store. */
884d9141
DM
2718
2719void
808f4dfe
DM
2720region_model::clobber_region (const region *reg)
2721{
2722 m_store.clobber_region (m_mgr->get_store_manager(), reg);
2723}
2724
2725/* Remove any bindings for REG within the store. */
2726
2727void
2728region_model::purge_region (const region *reg)
2729{
2730 m_store.purge_region (m_mgr->get_store_manager(), reg);
2731}
2732
e61ffa20
DM
2733/* Fill REG with SVAL. */
2734
2735void
2736region_model::fill_region (const region *reg, const svalue *sval)
2737{
2738 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
2739}
2740
808f4dfe
DM
2741/* Zero-fill REG. */
2742
2743void
2744region_model::zero_fill_region (const region *reg)
2745{
2746 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
2747}
2748
2749/* Mark REG as having unknown content. */
2750
2751void
3a66c289
DM
2752region_model::mark_region_as_unknown (const region *reg,
2753 uncertainty_t *uncertainty)
884d9141 2754{
3a66c289
DM
2755 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
2756 uncertainty);
884d9141
DM
2757}
2758
808f4dfe 2759/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
2760 this model. */
2761
2762tristate
808f4dfe
DM
2763region_model::eval_condition (const svalue *lhs,
2764 enum tree_code op,
2765 const svalue *rhs) const
757bf1df 2766{
e978955d
DM
2767 /* For now, make no attempt to capture constraints on floating-point
2768 values. */
2769 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2770 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2771 return tristate::unknown ();
2772
808f4dfe 2773 tristate ts = eval_condition_without_cm (lhs, op, rhs);
757bf1df
DM
2774 if (ts.is_known ())
2775 return ts;
2776
2777 /* Otherwise, try constraints. */
808f4dfe 2778 return m_constraints->eval_condition (lhs, op, rhs);
757bf1df
DM
2779}
2780
808f4dfe 2781/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
2782 this model, without resorting to the constraint_manager.
2783
2784 This is exposed so that impl_region_model_context::on_state_leak can
2785 check for equality part-way through region_model::purge_unused_svalues
2786 without risking creating new ECs. */
2787
2788tristate
808f4dfe
DM
2789region_model::eval_condition_without_cm (const svalue *lhs,
2790 enum tree_code op,
2791 const svalue *rhs) const
757bf1df 2792{
757bf1df
DM
2793 gcc_assert (lhs);
2794 gcc_assert (rhs);
2795
2796 /* See what we know based on the values. */
808f4dfe
DM
2797
2798 /* For now, make no attempt to capture constraints on floating-point
2799 values. */
2800 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2801 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2802 return tristate::unknown ();
2803
2804 /* Unwrap any unmergeable values. */
2805 lhs = lhs->unwrap_any_unmergeable ();
2806 rhs = rhs->unwrap_any_unmergeable ();
2807
2808 if (lhs == rhs)
757bf1df 2809 {
808f4dfe
DM
2810 /* If we have the same svalue, then we have equality
2811 (apart from NaN-handling).
2812 TODO: should this definitely be the case for poisoned values? */
2813 /* Poisoned and unknown values are "unknowable". */
2814 if (lhs->get_kind () == SK_POISONED
2815 || lhs->get_kind () == SK_UNKNOWN)
2816 return tristate::TS_UNKNOWN;
e978955d 2817
808f4dfe 2818 switch (op)
757bf1df 2819 {
808f4dfe
DM
2820 case EQ_EXPR:
2821 case GE_EXPR:
2822 case LE_EXPR:
2823 return tristate::TS_TRUE;
07c86323 2824
808f4dfe
DM
2825 case NE_EXPR:
2826 case GT_EXPR:
2827 case LT_EXPR:
2828 return tristate::TS_FALSE;
2829
2830 default:
2831 /* For other ops, use the logic below. */
2832 break;
757bf1df 2833 }
808f4dfe 2834 }
757bf1df 2835
808f4dfe
DM
2836 /* If we have a pair of region_svalues, compare them. */
2837 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
2838 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
2839 {
2840 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
2841 if (res.is_known ())
2842 return res;
2843 /* Otherwise, only known through constraints. */
2844 }
757bf1df 2845
808f4dfe
DM
2846 /* If we have a pair of constants, compare them. */
2847 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
2848 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
2849 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
757bf1df 2850
e82e0f14
DM
2851 /* Handle comparison against zero. */
2852 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
2853 if (zerop (cst_rhs->get_constant ()))
2854 {
2855 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
2856 {
2857 /* A region_svalue is a non-NULL pointer, except in certain
2858 special cases (see the comment for region::non_null_p). */
2859 const region *pointee = ptr->get_pointee ();
2860 if (pointee->non_null_p ())
2861 {
2862 switch (op)
2863 {
2864 default:
2865 gcc_unreachable ();
2866
2867 case EQ_EXPR:
2868 case GE_EXPR:
2869 case LE_EXPR:
2870 return tristate::TS_FALSE;
2871
2872 case NE_EXPR:
2873 case GT_EXPR:
2874 case LT_EXPR:
2875 return tristate::TS_TRUE;
2876 }
2877 }
2878 }
2879 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
2880 {
2881 /* Treat offsets from a non-NULL pointer as being non-NULL. This
2882 isn't strictly true, in that eventually ptr++ will wrap
2883 around and be NULL, but it won't occur in practise and thus
2884 can be used to suppress effectively false positives that we
2885 shouldn't warn for. */
2886 if (binop->get_op () == POINTER_PLUS_EXPR)
2887 {
2888 tristate lhs_ts
2889 = eval_condition_without_cm (binop->get_arg0 (),
2890 op, rhs);
2891 if (lhs_ts.is_known ())
2892 return lhs_ts;
2893 }
2894 }
2895 }
808f4dfe
DM
2896
2897 /* Handle rejection of equality for comparisons of the initial values of
2898 "external" values (such as params) with the address of locals. */
2899 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
2900 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
2901 {
2902 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
2903 if (res.is_known ())
2904 return res;
2905 }
2906 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
2907 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
2908 {
2909 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
2910 if (res.is_known ())
2911 return res;
2912 }
2913
2914 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
2915 if (tree rhs_cst = rhs->maybe_get_constant ())
2916 {
2917 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
2918 if (res.is_known ())
2919 return res;
2920 }
2921
2922 return tristate::TS_UNKNOWN;
2923}
2924
2925/* Subroutine of region_model::eval_condition_without_cm, for rejecting
2926 equality of INIT_VAL(PARM) with &LOCAL. */
2927
2928tristate
2929region_model::compare_initial_and_pointer (const initial_svalue *init,
2930 const region_svalue *ptr) const
2931{
2932 const region *pointee = ptr->get_pointee ();
2933
2934 /* If we have a pointer to something within a stack frame, it can't be the
2935 initial value of a param. */
2936 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
2937 if (init->initial_value_of_param_p ())
2938 return tristate::TS_FALSE;
757bf1df
DM
2939
2940 return tristate::TS_UNKNOWN;
2941}
2942
48e8a7a6
DM
2943/* Handle various constraints of the form:
2944 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
2945 OP : == or !=
2946 RHS: zero
2947 and (with a cast):
2948 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
2949 OP : == or !=
2950 RHS: zero
2951 by adding constraints for INNER_LHS INNEROP INNER_RHS.
2952
2953 Return true if this function can fully handle the constraint; if
2954 so, add the implied constraint(s) and write true to *OUT if they
2955 are consistent with existing constraints, or write false to *OUT
2956 if they contradicts existing constraints.
2957
2958 Return false for cases that this function doeesn't know how to handle.
2959
2960 For example, if we're checking a stored conditional, we'll have
2961 something like:
2962 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
2963 OP : NE_EXPR
2964 RHS: zero
2965 which this function can turn into an add_constraint of:
2966 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
2967
2968 Similarly, optimized && and || conditionals lead to e.g.
2969 if (p && q)
2970 becoming gimple like this:
2971 _1 = p_6 == 0B;
2972 _2 = q_8 == 0B
2973 _3 = _1 | _2
2974 On the "_3 is false" branch we can have constraints of the form:
2975 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2976 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
2977 == 0
2978 which implies that both _1 and _2 are false,
2979 which this function can turn into a pair of add_constraints of
2980 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2981 and:
2982 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
2983
2984bool
2985region_model::add_constraints_from_binop (const svalue *outer_lhs,
2986 enum tree_code outer_op,
2987 const svalue *outer_rhs,
2988 bool *out,
2989 region_model_context *ctxt)
2990{
2991 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
2992 outer_lhs = cast;
2993 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
2994 if (!binop_sval)
2995 return false;
2996 if (!outer_rhs->all_zeroes_p ())
2997 return false;
2998
2999 const svalue *inner_lhs = binop_sval->get_arg0 ();
3000 enum tree_code inner_op = binop_sval->get_op ();
3001 const svalue *inner_rhs = binop_sval->get_arg1 ();
3002
3003 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
3004 return false;
3005
3006 /* We have either
3007 - "OUTER_LHS != false" (i.e. OUTER is true), or
3008 - "OUTER_LHS == false" (i.e. OUTER is false). */
3009 bool is_true = outer_op == NE_EXPR;
3010
3011 switch (inner_op)
3012 {
3013 default:
3014 return false;
3015
3016 case EQ_EXPR:
3017 case NE_EXPR:
3018 {
3019 /* ...and "(inner_lhs OP inner_rhs) == 0"
3020 then (inner_lhs OP inner_rhs) must have the same
3021 logical value as LHS. */
3022 if (!is_true)
3023 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
3024 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
3025 return true;
3026 }
3027 break;
3028
3029 case BIT_AND_EXPR:
3030 if (is_true)
3031 {
3032 /* ...and "(inner_lhs & inner_rhs) != 0"
3033 then both inner_lhs and inner_rhs must be true. */
3034 const svalue *false_sval
3035 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3036 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
3037 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
3038 *out = sat1 && sat2;
3039 return true;
3040 }
3041 return false;
3042
3043 case BIT_IOR_EXPR:
3044 if (!is_true)
3045 {
3046 /* ...and "(inner_lhs | inner_rhs) == 0"
3047 i.e. "(inner_lhs | inner_rhs)" is false
3048 then both inner_lhs and inner_rhs must be false. */
3049 const svalue *false_sval
3050 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3051 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
3052 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
3053 *out = sat1 && sat2;
3054 return true;
3055 }
3056 return false;
3057 }
3058}
3059
757bf1df
DM
3060/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3061 If it is consistent with existing constraints, add it, and return true.
3062 Return false if it contradicts existing constraints.
3063 Use CTXT for reporting any diagnostics associated with the accesses. */
3064
3065bool
3066region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3067 region_model_context *ctxt)
3068{
e978955d
DM
3069 /* For now, make no attempt to capture constraints on floating-point
3070 values. */
3071 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3072 return true;
3073
808f4dfe
DM
3074 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3075 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 3076
48e8a7a6
DM
3077 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3078}
3079
3080/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3081 If it is consistent with existing constraints, add it, and return true.
3082 Return false if it contradicts existing constraints.
3083 Use CTXT for reporting any diagnostics associated with the accesses. */
3084
3085bool
3086region_model::add_constraint (const svalue *lhs,
3087 enum tree_code op,
3088 const svalue *rhs,
3089 region_model_context *ctxt)
3090{
3091 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
3092
3093 /* If we already have the condition, do nothing. */
3094 if (t_cond.is_true ())
3095 return true;
3096
3097 /* Reject a constraint that would contradict existing knowledge, as
3098 unsatisfiable. */
3099 if (t_cond.is_false ())
3100 return false;
3101
48e8a7a6
DM
3102 bool out;
3103 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3104 return out;
757bf1df 3105
c4b8f373
DM
3106 /* Attempt to store the constraint. */
3107 if (!m_constraints->add_constraint (lhs, op, rhs))
3108 return false;
757bf1df
DM
3109
3110 /* Notify the context, if any. This exists so that the state machines
3111 in a program_state can be notified about the condition, and so can
3112 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3113 when synthesizing constraints as above. */
3114 if (ctxt)
3115 ctxt->on_condition (lhs, op, rhs);
3116
9a2c9579
DM
3117 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3118 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
3119 if (tree rhs_cst = rhs->maybe_get_constant ())
3120 if (op == EQ_EXPR && zerop (rhs_cst))
3121 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3122 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 3123
757bf1df
DM
3124 return true;
3125}
3126
84fb3546
DM
3127/* As above, but when returning false, if OUT is non-NULL, write a
3128 new rejected_constraint to *OUT. */
3129
3130bool
3131region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3132 region_model_context *ctxt,
3133 rejected_constraint **out)
3134{
3135 bool sat = add_constraint (lhs, op, rhs, ctxt);
3136 if (!sat && out)
8ca7fa84 3137 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
3138 return sat;
3139}
3140
757bf1df
DM
3141/* Determine what is known about the condition "LHS OP RHS" within
3142 this model.
3143 Use CTXT for reporting any diagnostics associated with the accesses. */
3144
3145tristate
3146region_model::eval_condition (tree lhs,
3147 enum tree_code op,
3148 tree rhs,
3149 region_model_context *ctxt)
3150{
e978955d
DM
3151 /* For now, make no attempt to model constraints on floating-point
3152 values. */
3153 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3154 return tristate::unknown ();
3155
757bf1df
DM
3156 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3157}
3158
467a4820
DM
3159/* Implementation of region_model::get_representative_path_var.
3160 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
3161 Use VISITED to prevent infinite mutual recursion with the overload for
3162 regions. */
757bf1df 3163
808f4dfe 3164path_var
467a4820
DM
3165region_model::get_representative_path_var_1 (const svalue *sval,
3166 svalue_set *visited) const
757bf1df 3167{
467a4820 3168 gcc_assert (sval);
757bf1df 3169
808f4dfe
DM
3170 /* Prevent infinite recursion. */
3171 if (visited->contains (sval))
3172 return path_var (NULL_TREE, 0);
3173 visited->add (sval);
757bf1df 3174
467a4820
DM
3175 /* Handle casts by recursion into get_representative_path_var. */
3176 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3177 {
3178 path_var result = get_representative_path_var (cast_sval, visited);
3179 tree orig_type = sval->get_type ();
3180 /* If necessary, wrap the result in a cast. */
3181 if (result.m_tree && orig_type)
3182 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3183 return result;
3184 }
3185
808f4dfe
DM
3186 auto_vec<path_var> pvs;
3187 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 3188
808f4dfe
DM
3189 if (tree cst = sval->maybe_get_constant ())
3190 pvs.safe_push (path_var (cst, 0));
757bf1df 3191
90f7c300 3192 /* Handle string literals and various other pointers. */
808f4dfe
DM
3193 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3194 {
3195 const region *reg = ptr_sval->get_pointee ();
3196 if (path_var pv = get_representative_path_var (reg, visited))
3197 return path_var (build1 (ADDR_EXPR,
467a4820 3198 sval->get_type (),
808f4dfe
DM
3199 pv.m_tree),
3200 pv.m_stack_depth);
3201 }
3202
3203 /* If we have a sub_svalue, look for ways to represent the parent. */
3204 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 3205 {
808f4dfe
DM
3206 const svalue *parent_sval = sub_sval->get_parent ();
3207 const region *subreg = sub_sval->get_subregion ();
3208 if (path_var parent_pv
3209 = get_representative_path_var (parent_sval, visited))
3210 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3211 return path_var (build3 (COMPONENT_REF,
3212 sval->get_type (),
3213 parent_pv.m_tree,
3214 field_reg->get_field (),
3215 NULL_TREE),
3216 parent_pv.m_stack_depth);
90f7c300
DM
3217 }
3218
b9365b93
DM
3219 /* Handle binops. */
3220 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3221 if (path_var lhs_pv
3222 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3223 if (path_var rhs_pv
3224 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3225 return path_var (build2 (binop_sval->get_op (),
3226 sval->get_type (),
3227 lhs_pv.m_tree, rhs_pv.m_tree),
3228 lhs_pv.m_stack_depth);
3229
808f4dfe
DM
3230 if (pvs.length () < 1)
3231 return path_var (NULL_TREE, 0);
3232
3233 pvs.qsort (readability_comparator);
3234 return pvs[0];
757bf1df
DM
3235}
3236
467a4820
DM
3237/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3238 Use VISITED to prevent infinite mutual recursion with the overload for
3239 regions
3240
3241 This function defers to get_representative_path_var_1 to do the work;
3242 it adds verification that get_representative_path_var_1 returned a tree
3243 of the correct type. */
3244
3245path_var
3246region_model::get_representative_path_var (const svalue *sval,
3247 svalue_set *visited) const
3248{
3249 if (sval == NULL)
3250 return path_var (NULL_TREE, 0);
3251
3252 tree orig_type = sval->get_type ();
3253
3254 path_var result = get_representative_path_var_1 (sval, visited);
3255
3256 /* Verify that the result has the same type as SVAL, if any. */
3257 if (result.m_tree && orig_type)
3258 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3259
3260 return result;
3261}
3262
3263/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3264
3265 Strip off any top-level cast, to avoid messages like
3266 double-free of '(void *)ptr'
3267 from analyzer diagnostics. */
757bf1df 3268
808f4dfe
DM
3269tree
3270region_model::get_representative_tree (const svalue *sval) const
757bf1df 3271{
808f4dfe 3272 svalue_set visited;
467a4820
DM
3273 tree expr = get_representative_path_var (sval, &visited).m_tree;
3274
3275 /* Strip off any top-level cast. */
3276 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 3277 expr = TREE_OPERAND (expr, 0);
467a4820 3278
e4bb1bd6 3279 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
3280}
3281
467a4820
DM
3282/* Implementation of region_model::get_representative_path_var.
3283
3284 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
3285 the NULL path_var.
3286 For example, a region for a field of a local would be a path_var
3287 wrapping a COMPONENT_REF.
3288 Use VISITED to prevent infinite mutual recursion with the overload for
3289 svalues. */
757bf1df 3290
808f4dfe 3291path_var
467a4820
DM
3292region_model::get_representative_path_var_1 (const region *reg,
3293 svalue_set *visited) const
808f4dfe
DM
3294{
3295 switch (reg->get_kind ())
757bf1df 3296 {
808f4dfe
DM
3297 default:
3298 gcc_unreachable ();
e516294a 3299
808f4dfe
DM
3300 case RK_FRAME:
3301 case RK_GLOBALS:
3302 case RK_CODE:
3303 case RK_HEAP:
3304 case RK_STACK:
3305 case RK_ROOT:
3306 /* Regions that represent memory spaces are not expressible as trees. */
3307 return path_var (NULL_TREE, 0);
757bf1df 3308
808f4dfe 3309 case RK_FUNCTION:
884d9141 3310 {
808f4dfe
DM
3311 const function_region *function_reg
3312 = as_a <const function_region *> (reg);
3313 return path_var (function_reg->get_fndecl (), 0);
884d9141 3314 }
808f4dfe 3315 case RK_LABEL:
9e78634c
DM
3316 {
3317 const label_region *label_reg = as_a <const label_region *> (reg);
3318 return path_var (label_reg->get_label (), 0);
3319 }
90f7c300 3320
808f4dfe
DM
3321 case RK_SYMBOLIC:
3322 {
3323 const symbolic_region *symbolic_reg
3324 = as_a <const symbolic_region *> (reg);
3325 const svalue *pointer = symbolic_reg->get_pointer ();
3326 path_var pointer_pv = get_representative_path_var (pointer, visited);
3327 if (!pointer_pv)
3328 return path_var (NULL_TREE, 0);
3329 tree offset = build_int_cst (pointer->get_type (), 0);
3330 return path_var (build2 (MEM_REF,
3331 reg->get_type (),
3332 pointer_pv.m_tree,
3333 offset),
3334 pointer_pv.m_stack_depth);
3335 }
3336 case RK_DECL:
3337 {
3338 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3339 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3340 }
3341 case RK_FIELD:
3342 {
3343 const field_region *field_reg = as_a <const field_region *> (reg);
3344 path_var parent_pv
3345 = get_representative_path_var (reg->get_parent_region (), visited);
3346 if (!parent_pv)
3347 return path_var (NULL_TREE, 0);
3348 return path_var (build3 (COMPONENT_REF,
3349 reg->get_type (),
3350 parent_pv.m_tree,
3351 field_reg->get_field (),
3352 NULL_TREE),
3353 parent_pv.m_stack_depth);
3354 }
757bf1df 3355
808f4dfe
DM
3356 case RK_ELEMENT:
3357 {
3358 const element_region *element_reg
3359 = as_a <const element_region *> (reg);
3360 path_var parent_pv
3361 = get_representative_path_var (reg->get_parent_region (), visited);
3362 if (!parent_pv)
3363 return path_var (NULL_TREE, 0);
3364 path_var index_pv
3365 = get_representative_path_var (element_reg->get_index (), visited);
3366 if (!index_pv)
3367 return path_var (NULL_TREE, 0);
3368 return path_var (build4 (ARRAY_REF,
3369 reg->get_type (),
3370 parent_pv.m_tree, index_pv.m_tree,
3371 NULL_TREE, NULL_TREE),
3372 parent_pv.m_stack_depth);
3373 }
757bf1df 3374
808f4dfe 3375 case RK_OFFSET:
757bf1df 3376 {
808f4dfe
DM
3377 const offset_region *offset_reg
3378 = as_a <const offset_region *> (reg);
3379 path_var parent_pv
3380 = get_representative_path_var (reg->get_parent_region (), visited);
3381 if (!parent_pv)
3382 return path_var (NULL_TREE, 0);
3383 path_var offset_pv
3384 = get_representative_path_var (offset_reg->get_byte_offset (),
3385 visited);
29f5db8e 3386 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 3387 return path_var (NULL_TREE, 0);
29f5db8e
DM
3388 tree addr_parent = build1 (ADDR_EXPR,
3389 build_pointer_type (reg->get_type ()),
3390 parent_pv.m_tree);
808f4dfe
DM
3391 return path_var (build2 (MEM_REF,
3392 reg->get_type (),
29f5db8e 3393 addr_parent, offset_pv.m_tree),
808f4dfe 3394 parent_pv.m_stack_depth);
757bf1df 3395 }
757bf1df 3396
e61ffa20
DM
3397 case RK_SIZED:
3398 return path_var (NULL_TREE, 0);
3399
808f4dfe
DM
3400 case RK_CAST:
3401 {
3402 path_var parent_pv
3403 = get_representative_path_var (reg->get_parent_region (), visited);
3404 if (!parent_pv)
3405 return path_var (NULL_TREE, 0);
3406 return path_var (build1 (NOP_EXPR,
3407 reg->get_type (),
3408 parent_pv.m_tree),
3409 parent_pv.m_stack_depth);
3410 }
757bf1df 3411
808f4dfe
DM
3412 case RK_HEAP_ALLOCATED:
3413 case RK_ALLOCA:
3414 /* No good way to express heap-allocated/alloca regions as trees. */
3415 return path_var (NULL_TREE, 0);
757bf1df 3416
808f4dfe
DM
3417 case RK_STRING:
3418 {
3419 const string_region *string_reg = as_a <const string_region *> (reg);
3420 return path_var (string_reg->get_string_cst (), 0);
3421 }
757bf1df 3422
808f4dfe
DM
3423 case RK_UNKNOWN:
3424 return path_var (NULL_TREE, 0);
3425 }
757bf1df
DM
3426}
3427
467a4820
DM
3428/* Attempt to return a path_var that represents REG, or return
3429 the NULL path_var.
3430 For example, a region for a field of a local would be a path_var
3431 wrapping a COMPONENT_REF.
3432 Use VISITED to prevent infinite mutual recursion with the overload for
3433 svalues.
3434
3435 This function defers to get_representative_path_var_1 to do the work;
3436 it adds verification that get_representative_path_var_1 returned a tree
3437 of the correct type. */
3438
3439path_var
3440region_model::get_representative_path_var (const region *reg,
3441 svalue_set *visited) const
3442{
3443 path_var result = get_representative_path_var_1 (reg, visited);
3444
3445 /* Verify that the result has the same type as REG, if any. */
3446 if (result.m_tree && reg->get_type ())
3447 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
3448
3449 return result;
3450}
3451
757bf1df
DM
3452/* Update this model for any phis in SNODE, assuming we came from
3453 LAST_CFG_SUPEREDGE. */
3454
3455void
3456region_model::update_for_phis (const supernode *snode,
3457 const cfg_superedge *last_cfg_superedge,
3458 region_model_context *ctxt)
3459{
3460 gcc_assert (last_cfg_superedge);
3461
e0a7a675
DM
3462 /* Copy this state and pass it to handle_phi so that all of the phi stmts
3463 are effectively handled simultaneously. */
3464 const region_model old_state (*this);
3465
757bf1df
DM
3466 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
3467 !gsi_end_p (gpi); gsi_next (&gpi))
3468 {
3469 gphi *phi = gpi.phi ();
3470
3471 tree src = last_cfg_superedge->get_phi_arg (phi);
3472 tree lhs = gimple_phi_result (phi);
3473
e0a7a675
DM
3474 /* Update next_state based on phi and old_state. */
3475 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
3476 }
3477}
3478
3479/* Attempt to update this model for taking EDGE (where the last statement
3480 was LAST_STMT), returning true if the edge can be taken, false
3481 otherwise.
84fb3546
DM
3482 When returning false, if OUT is non-NULL, write a new rejected_constraint
3483 to it.
757bf1df
DM
3484
3485 For CFG superedges where LAST_STMT is a conditional or a switch
3486 statement, attempt to add the relevant conditions for EDGE to this
3487 model, returning true if they are feasible, or false if they are
3488 impossible.
3489
3490 For call superedges, push frame information and store arguments
3491 into parameters.
3492
3493 For return superedges, pop frame information and store return
3494 values into any lhs.
3495
3496 Rejection of call/return superedges happens elsewhere, in
3497 program_point::on_edge (i.e. based on program point, rather
3498 than program state). */
3499
3500bool
3501region_model::maybe_update_for_edge (const superedge &edge,
3502 const gimple *last_stmt,
84fb3546
DM
3503 region_model_context *ctxt,
3504 rejected_constraint **out)
757bf1df
DM
3505{
3506 /* Handle frame updates for interprocedural edges. */
3507 switch (edge.m_kind)
3508 {
3509 default:
3510 break;
3511
3512 case SUPEREDGE_CALL:
3513 {
3514 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
3515 update_for_call_superedge (*call_edge, ctxt);
3516 }
3517 break;
3518
3519 case SUPEREDGE_RETURN:
3520 {
3521 const return_superedge *return_edge
3522 = as_a <const return_superedge *> (&edge);
3523 update_for_return_superedge (*return_edge, ctxt);
3524 }
3525 break;
3526
3527 case SUPEREDGE_INTRAPROCEDURAL_CALL:
3528 {
3529 const callgraph_superedge *cg_sedge
3530 = as_a <const callgraph_superedge *> (&edge);
3531 update_for_call_summary (*cg_sedge, ctxt);
3532 }
3533 break;
3534 }
3535
3536 if (last_stmt == NULL)
3537 return true;
3538
3539 /* Apply any constraints for conditionals/switch statements. */
3540
3541 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
3542 {
3543 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 3544 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
3545 }
3546
3547 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
3548 {
3549 const switch_cfg_superedge *switch_sedge
3550 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
3551 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
3552 ctxt, out);
757bf1df
DM
3553 }
3554
1690a839
DM
3555 /* Apply any constraints due to an exception being thrown. */
3556 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
3557 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 3558 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 3559
757bf1df
DM
3560 return true;
3561}
3562
3563/* Push a new frame_region on to the stack region.
3564 Populate the frame_region with child regions for the function call's
3565 parameters, using values from the arguments at the callsite in the
3566 caller's frame. */
3567
3568void
aef703cf 3569region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
3570 region_model_context *ctxt,
3571 function *callee)
757bf1df 3572{
808f4dfe 3573 /* Build a vec of argument svalues, using the current top
757bf1df 3574 frame for resolving tree expressions. */
808f4dfe 3575 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
3576
3577 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
3578 {
3579 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 3580 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
3581 }
3582
e92d0ff6
AS
3583 if(!callee)
3584 {
3585 /* Get the function * from the gcall. */
3586 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
3587 callee = DECL_STRUCT_FUNCTION (fn_decl);
3588 }
3589
3590 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
3591}
3592
a96f1c38
DM
3593/* Pop the top-most frame_region from the stack, and copy the return
3594 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 3595 the call (if any). */
aef703cf 3596
757bf1df 3597void
aef703cf
AS
3598region_model::update_for_return_gcall (const gcall *call_stmt,
3599 region_model_context *ctxt)
757bf1df 3600{
a96f1c38 3601 /* Get the region for the result of the call, within the caller frame. */
808f4dfe 3602 const region *result_dst_reg = NULL;
757bf1df
DM
3603 tree lhs = gimple_call_lhs (call_stmt);
3604 if (lhs)
a96f1c38
DM
3605 {
3606 /* Normally we access the top-level frame, which is:
aef703cf
AS
3607 path_var (expr, get_stack_depth () - 1)
3608 whereas here we need the caller frame, hence "- 2" here. */
808f4dfe
DM
3609 gcc_assert (get_stack_depth () >= 2);
3610 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
aef703cf 3611 ctxt);
a96f1c38
DM
3612 }
3613
808f4dfe 3614 pop_frame (result_dst_reg, NULL, ctxt);
757bf1df
DM
3615}
3616
aef703cf
AS
3617/* Extract calling information from the superedge and update the model for the
3618 call */
3619
3620void
3621region_model::update_for_call_superedge (const call_superedge &call_edge,
3622 region_model_context *ctxt)
3623{
3624 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 3625 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
3626}
3627
3628/* Extract calling information from the return superedge and update the model
3629 for the returning call */
3630
3631void
3632region_model::update_for_return_superedge (const return_superedge &return_edge,
3633 region_model_context *ctxt)
3634{
3635 const gcall *call_stmt = return_edge.get_call_stmt ();
3636 update_for_return_gcall (call_stmt, ctxt);
3637}
3638
757bf1df
DM
3639/* Update this region_model with a summary of the effect of calling
3640 and returning from CG_SEDGE.
3641
3642 TODO: Currently this is extremely simplistic: we merely set the
3643 return value to "unknown". A proper implementation would e.g. update
3644 sm-state, and presumably be reworked to support multiple outcomes. */
3645
3646void
3647region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
3648 region_model_context *ctxt)
3649{
3650 /* For now, set any return value to "unknown". */
3651 const gcall *call_stmt = cg_sedge.get_call_stmt ();
3652 tree lhs = gimple_call_lhs (call_stmt);
3653 if (lhs)
3a66c289
DM
3654 mark_region_as_unknown (get_lvalue (lhs, ctxt),
3655 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
3656
3657 // TODO: actually implement some kind of summary here
3658}
3659
3660/* Given a true or false edge guarded by conditional statement COND_STMT,
3661 determine appropriate constraints for the edge to be taken.
3662
3663 If they are feasible, add the constraints and return true.
3664
3665 Return false if the constraints contradict existing knowledge
84fb3546
DM
3666 (and so the edge should not be taken).
3667 When returning false, if OUT is non-NULL, write a new rejected_constraint
3668 to it. */
757bf1df
DM
3669
3670bool
3671region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
3672 const gcond *cond_stmt,
84fb3546
DM
3673 region_model_context *ctxt,
3674 rejected_constraint **out)
757bf1df
DM
3675{
3676 ::edge cfg_edge = sedge.get_cfg_edge ();
3677 gcc_assert (cfg_edge != NULL);
3678 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
3679
3680 enum tree_code op = gimple_cond_code (cond_stmt);
3681 tree lhs = gimple_cond_lhs (cond_stmt);
3682 tree rhs = gimple_cond_rhs (cond_stmt);
3683 if (cfg_edge->flags & EDGE_FALSE_VALUE)
3684 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 3685 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
3686}
3687
3688/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
3689 for the edge to be taken.
3690
3691 If they are feasible, add the constraints and return true.
3692
3693 Return false if the constraints contradict existing knowledge
84fb3546
DM
3694 (and so the edge should not be taken).
3695 When returning false, if OUT is non-NULL, write a new rejected_constraint
3696 to it. */
757bf1df
DM
3697
3698bool
3699region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
3700 const gswitch *switch_stmt,
84fb3546
DM
3701 region_model_context *ctxt,
3702 rejected_constraint **out)
757bf1df 3703{
8ca7fa84
DM
3704 bounded_ranges_manager *ranges_mgr = get_range_manager ();
3705 const bounded_ranges *all_cases_ranges
3706 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
757bf1df 3707 tree index = gimple_switch_index (switch_stmt);
8ca7fa84
DM
3708 const svalue *index_sval = get_rvalue (index, ctxt);
3709 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
3710 if (!sat && out)
3711 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
3712 return sat;
757bf1df
DM
3713}
3714
1690a839
DM
3715/* Apply any constraints due to an exception being thrown at LAST_STMT.
3716
3717 If they are feasible, add the constraints and return true.
3718
3719 Return false if the constraints contradict existing knowledge
84fb3546
DM
3720 (and so the edge should not be taken).
3721 When returning false, if OUT is non-NULL, write a new rejected_constraint
3722 to it. */
1690a839
DM
3723
3724bool
3725region_model::apply_constraints_for_exception (const gimple *last_stmt,
84fb3546
DM
3726 region_model_context *ctxt,
3727 rejected_constraint **out)
1690a839
DM
3728{
3729 gcc_assert (last_stmt);
3730 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
3731 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
3732 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
3733 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
3734 {
3735 /* We have an exception thrown from operator new.
3736 Add a constraint that the result was NULL, to avoid a false
3737 leak report due to the result being lost when following
3738 the EH edge. */
3739 if (tree lhs = gimple_call_lhs (call))
84fb3546 3740 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
3741 return true;
3742 }
3743 return true;
3744}
3745
808f4dfe
DM
3746/* For use with push_frame when handling a top-level call within the analysis.
3747 PARAM has a defined but unknown initial value.
3748 Anything it points to has escaped, since the calling context "knows"
3749 the pointer, and thus calls to unknown functions could read/write into
3750 the region. */
757bf1df
DM
3751
3752void
808f4dfe 3753region_model::on_top_level_param (tree param,
3a25f345 3754 region_model_context *ctxt)
757bf1df 3755{
808f4dfe 3756 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 3757 {
808f4dfe
DM
3758 const region *param_reg = get_lvalue (param, ctxt);
3759 const svalue *init_ptr_sval
3760 = m_mgr->get_or_create_initial_value (param_reg);
3761 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
3762 m_store.mark_as_escaped (pointee_reg);
5eae0ac7 3763 }
757bf1df
DM
3764}
3765
808f4dfe
DM
/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
                          region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm), ++idx)
        {
          /* If there's a mismatching declaration, the call stmt might
             not have enough args.  Handle this case by leaving the
             rest of the params as uninitialized.  */
          if (idx >= arg_svals->length ())
            break;
          /* Prefer writing to the default-def SSA name for the param,
             if it has one, since reads of the param go via that name.  */
          tree parm_lval = iter_parm;
          if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
            parm_lval = parm_default_ssa;
          const region *parm_reg = get_lvalue (parm_lval, ctxt);
          const svalue *arg_sval = (*arg_svals)[idx];
          set_value (parm_reg, arg_sval, ctxt);
        }
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
         have defined but unknown initial values.
         Anything they point to has escaped.  */
      tree fndecl = fun->decl;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm))
        {
          /* As above, work on the default-def SSA name when there is one.  */
          if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
            on_top_level_param (parm_default_ssa, ctxt);
          else
            on_top_level_param (iter_parm, ctxt);
        }
    }

  return m_current_frame;
}
3819
808f4dfe
DM
3820/* Get the function of the top-most frame in this region_model's stack.
3821 There must be such a frame. */
757bf1df 3822
808f4dfe
DM
3823function *
3824region_model::get_current_function () const
757bf1df 3825{
808f4dfe
DM
3826 const frame_region *frame = get_current_frame ();
3827 gcc_assert (frame);
3828 return frame->get_function ();
757bf1df
DM
3829}
3830
808f4dfe 3831/* Pop the topmost frame_region from this region_model's stack;
757bf1df 3832
808f4dfe
DM
3833 If RESULT_DST_REG is non-null, copy any return value from the frame
3834 into RESULT_DST_REG's region.
3835 If OUT_RESULT is non-null, copy any return value from the frame
3836 into *OUT_RESULT.
757bf1df 3837
808f4dfe
DM
3838 Purge the frame region and all its descendent regions.
3839 Convert any pointers that point into such regions into
3840 POISON_KIND_POPPED_STACK svalues. */
757bf1df 3841
808f4dfe
DM
3842void
3843region_model::pop_frame (const region *result_dst_reg,
3844 const svalue **out_result,
3845 region_model_context *ctxt)
3846{
3847 gcc_assert (m_current_frame);
757bf1df 3848
808f4dfe
DM
3849 /* Evaluate the result, within the callee frame. */
3850 const frame_region *frame_reg = m_current_frame;
3851 tree fndecl = m_current_frame->get_function ()->decl;
3852 tree result = DECL_RESULT (fndecl);
3853 if (result && TREE_TYPE (result) != void_type_node)
3854 {
13ad6d9f 3855 const svalue *retval = get_rvalue (result, ctxt);
808f4dfe 3856 if (result_dst_reg)
13ad6d9f 3857 set_value (result_dst_reg, retval, ctxt);
808f4dfe 3858 if (out_result)
13ad6d9f 3859 *out_result = retval;
808f4dfe 3860 }
757bf1df 3861
808f4dfe
DM
3862 /* Pop the frame. */
3863 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 3864
808f4dfe 3865 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
3866}
3867
808f4dfe 3868/* Get the number of frames in this region_model's stack. */
757bf1df 3869
808f4dfe
DM
3870int
3871region_model::get_stack_depth () const
757bf1df 3872{
808f4dfe
DM
3873 const frame_region *frame = get_current_frame ();
3874 if (frame)
3875 return frame->get_stack_depth ();
3876 else
3877 return 0;
757bf1df
DM
3878}
3879
808f4dfe
DM
/* Get the frame_region with the given index within the stack.
   The frame_region must exist.  */

const frame_region *
region_model::get_frame_at_index (int index) const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  gcc_assert (index >= 0);
  gcc_assert (index <= frame->get_index ());
  /* Walk outwards from the innermost frame until the frame with the
     requested index is reached.  */
  while (index != frame->get_index ())
    {
      frame = frame->get_calling_frame ();
      gcc_assert (frame);
    }
  return frame;
}
3897
808f4dfe
DM
/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
                                             enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound first, so that we don't
     remove clusters from m_store whilst iterating over it.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
        base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).
     NOTE(review): this removes entries from m_dynamic_extents whilst
     range-iterating over it — presumably safe for this map type, but
     worth confirming it doesn't invalidate the iterator.  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
        unset_dynamic_extents (iter_reg);
    }
}
3932
808f4dfe
DM
/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.  */

struct bad_pointer_finder
{
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
                      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  /* Callback for each binding; SVAL is passed by reference so that
     the binding can be rewritten in place.  */
  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
        const region *ptr_dst = ptr_sval->get_pointee ();
        /* Poison ptrs to descendents of REG, but not to REG itself,
           otherwise double-free detection doesn't work (since sm-state
           for "free" is stored on the original ptr svalue).  */
        if (ptr_dst->descendent_of_p (m_reg)
            && ptr_dst != m_reg)
          {
            sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
                                                         sval->get_type ());
            ++m_count;
          }
      }
  }

  const region *m_reg;
  enum poison_kind m_pkind;
  region_model_manager *const m_mgr;
  int m_count;  /* Number of pointers poisoned so far.  */
};
757bf1df 3967
808f4dfe
DM
/* Find any pointers to REG or its descendents; convert them to
   poisoned values of kind PKIND.
   Return the number of pointers that were poisoned.  */

int
region_model::poison_any_pointers_to_descendents (const region *reg,
                                                  enum poison_kind pkind)
{
  /* Delegate the per-binding work to the visitor above.  */
  bad_pointer_finder bv (reg, pkind, m_mgr);
  m_store.for_each_binding (bv);
  return bv.m_count;
}
3980
808f4dfe
DM
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.
   EXT_STATE, STATE_A and STATE_B are optional; when non-NULL they
   are consulted (via model_merger) when deciding whether individual
   svalues are mergeable.
   Return true iff the models were mergeable.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
                                const program_point &point,
                                region_model *out_model,
                                const extrinsic_state *ext_state,
                                const program_state *state_a,
                                const program_state *state_b) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models with different stacks can never be merged.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
                  ext_state, state_a, state_b);

  /* Merge the stores; bail out if they are incompatible.  */
  if (!store::can_merge_p (&m_store, &other_model.m_store,
                           &out_model->m_store, m_mgr->get_store_manager (),
                           &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
                                           &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
                             *other_model.m_constraints,
                             out_model->m_constraints);

  return true;
}
4020
/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
                                   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  /* Evaluate the callee expression; we can only resolve it if it's a
     pointer to a specific function_region.  */
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
        = fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
        {
          tree fn_decl = fn_reg->get_fndecl ();
          cgraph_node *node = cgraph_node::get (fn_decl);
          if (!node)
            return NULL_TREE;
          /* Look through any aliases to the underlying function.  */
          const cgraph_node *ultimate_node = node->ultimate_alias_target ();
          if (ultimate_node)
            return ultimate_node->decl;
        }
    }

  return NULL_TREE;
}
4050
/* Would be much simpler to use a lambda here, if it were supported.  */

/* Closure-style data passed to append_ssa_names_cb.  */

struct append_ssa_names_cb_data
{
  const region_model *model;            /* Model whose frame is scanned.  */
  auto_vec<const decl_region *> *out;   /* Accumulated SSA-name regions.  */
};
757bf1df 4058
808f4dfe
DM
4059/* Populate *OUT with all decl_regions for SSA names in the current
4060 frame that have clusters within the store. */
757bf1df
DM
4061
4062void
808f4dfe
DM
4063region_model::
4064get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
4065 const
757bf1df 4066{
808f4dfe
DM
4067 append_ssa_names_cb_data data;
4068 data.model = this;
4069 data.out = out;
4070 m_store.for_each_cluster (append_ssa_names_cb, &data);
757bf1df
DM
4071}
4072
/* Implementation detail of get_ssa_name_regions_for_current_frame.
   Called per base region (cluster) within the store.  */

void
region_model::append_ssa_names_cb (const region *base_reg,
                                   append_ssa_names_cb_data *cb_data)
{
  /* Ignore clusters not belonging to the current frame.  */
  if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
    return;
  if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
    {
      /* Only collect decl_regions that are for SSA names.  */
      if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
        cb_data->out->safe_push (decl_reg);
    }
}
4087
b9365b93
DM
4088/* Return a new region describing a heap-allocated block of memory.
4089 Use CTXT to complain about tainted sizes. */
757bf1df 4090
808f4dfe 4091const region *
b9365b93
DM
4092region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
4093 region_model_context *ctxt)
757bf1df 4094{
808f4dfe 4095 const region *reg = m_mgr->create_region_for_heap_alloc ();
ea4e3218 4096 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 4097 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 4098 return reg;
757bf1df
DM
4099}
4100
/* Return a new region describing a block of memory allocated within the
   current frame.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_alloca (const svalue *size_in_bytes,
                                        region_model_context *ctxt)
{
  const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
  /* As for heap allocation: only record a size of a compatible type.  */
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
4114
b9365b93
DM
4115/* Record that the size of REG is SIZE_IN_BYTES.
4116 Use CTXT to complain about tainted sizes. */
757bf1df
DM
4117
4118void
9a2c9579 4119region_model::set_dynamic_extents (const region *reg,
b9365b93
DM
4120 const svalue *size_in_bytes,
4121 region_model_context *ctxt)
9a2c9579
DM
4122{
4123 assert_compat_types (size_in_bytes->get_type (), size_type_node);
b9365b93
DM
4124 if (ctxt)
4125 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
4126 ctxt);
9a2c9579
DM
4127 m_dynamic_extents.put (reg, size_in_bytes);
4128}
4129
/* Get the recorded size of REG in bytes, or NULL if no dynamic size was
   recorded.  */

const svalue *
region_model::get_dynamic_extents (const region *reg) const
{
  if (const svalue * const *slot = m_dynamic_extents.get (reg))
    return *slot;
  return NULL;
}
4140
/* Unset any recorded dynamic size of REG.  No-op if none was recorded.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  m_dynamic_extents.remove (reg);
}
4148
eafa9d96
DM
4149/* class noop_region_model_context : public region_model_context. */
4150
4151void
4152noop_region_model_context::bifurcate (custom_edge_info *info)
4153{
4154 delete info;
4155}
4156
4157void
4158noop_region_model_context::terminate_path ()
4159{
4160}
4161
/* struct model_merger.  */

/* Dump a multiline representation of this merger to PP:
   both input models, followed by the merged result.  */

void
model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
{
  pp_string (pp, "model A:");
  pp_newline (pp);
  m_model_a->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "model B:");
  pp_newline (pp);
  m_model_b->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "merged model:");
  pp_newline (pp);
  m_merged_model->dump_to_pp (pp, simple, true);
  pp_newline (pp);
}

/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  pretty_printer pp;
  /* Use tree-printing callbacks and the global color preference.  */
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}

/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
4205
f573d351
DM
4206/* Return true if it's OK to merge SVAL with other svalues. */
4207
4208bool
4209model_merger::mergeable_svalue_p (const svalue *sval) const
4210{
4211 if (m_ext_state)
4212 {
4213 /* Reject merging svalues that have non-purgable sm-state,
4214 to avoid falsely reporting memory leaks by merging them
4215 with something else. For example, given a local var "p",
4216 reject the merger of a:
4217 store_a mapping "p" to a malloc-ed ptr
4218 with:
4219 store_b mapping "p" to a NULL ptr. */
4220 if (m_state_a)
4221 if (!m_state_a->can_purge_p (*m_ext_state, sval))
4222 return false;
4223 if (m_state_b)
4224 if (!m_state_b->can_purge_p (*m_ext_state, sval))
4225 return false;
4226 }
4227 return true;
4228}
4229
75038aa6
DM
4230} // namespace ana
4231
/* Dump RMODEL fully to stderr (i.e. without summarization).
   Marked DEBUG_FUNCTION so it stays available to call from a debugger.  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}
4239
/* class rejected_op_constraint : public rejected_constraint.  */

/* Print the rejected constraint as "LHS op RHS", evaluating the operands
   within a copy of the captured model (presumably so that evaluation
   can't perturb the stored model — TODO confirm).  */

void
rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
{
  region_model m (m_model);
  const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
  const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
  lhs_sval->dump_to_pp (pp, true);
  pp_printf (pp, " %s ", op_symbol_code (m_op));
  rhs_sval->dump_to_pp (pp, true);
}
4252
/* class rejected_ranges_constraint : public rejected_constraint.  */

/* Print the rejected constraint as "EXPR in RANGES", evaluating EXPR
   within a copy of the captured model.  */

void
rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
{
  region_model m (m_model);
  const svalue *sval = m.get_rvalue (m_expr, NULL);
  sval->dump_to_pp (pp, true);
  pp_string (pp, " in ");
  m_ranges->dump_to_pp (pp, true);
}
4264
/* class engine.  */

/* engine's ctor; pass LOGGER down to the region_model_manager.  */

engine::engine (logger *logger)
: m_mgr (logger)
{
}

/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  m_mgr.log_stats (logger, true);
}
4281
75038aa6
DM
4282namespace ana {
4283
757bf1df
DM
4284#if CHECKING_P
4285
4286namespace selftest {
4287
8c08c983
DM
4288/* Build a constant tree of the given type from STR. */
4289
4290static tree
4291build_real_cst_from_string (tree type, const char *str)
4292{
4293 REAL_VALUE_TYPE real;
4294 real_from_string (&real, str);
4295 return build_real (type, real);
4296}
4297
/* Append various "interesting" constants to OUT (e.g. NaN):
   zero/nonzero ints in both signednesses, plus awkward floating-point
   values (quiet/signalling NaNs, signed zeros, infinities).  */

static void
append_interesting_constants (auto_vec<tree> *out)
{
  out->safe_push (build_int_cst (integer_type_node, 0));
  out->safe_push (build_int_cst (integer_type_node, 42));
  out->safe_push (build_int_cst (unsigned_type_node, 0));
  out->safe_push (build_int_cst (unsigned_type_node, 42));
  out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
}
4316
/* Verify that tree_cmp is a well-behaved comparator for qsort, even
   if the underlying constants aren't comparable.
   There are no explicit assertions: a misbehaving comparator would
   trip qsort's internal checking (or crash).  */

static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple.  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
        {
          auto_vec<tree> v (3);
          v.quick_push (csts[i]);
          v.quick_push (csts[j]);
          v.quick_push (csts[k]);
          v.qsort (tree_cmp);
        }
}
4339
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Overload for conditions expressed directly on svalues.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  const svalue *lhs, tree_code op, const svalue *rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}

/* Implementation detail of the ASSERT_CONDITION_* macros.
   Overload for conditions expressed on trees.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  tree lhs, tree_code op, tree rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}
4363
/* Implementation detail of ASSERT_DUMP_TREE_EQ.
   Dump T via dump_tree and compare the text against EXPECTED.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* Fix up quote characters so the expected strings are predictable.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
4382
757bf1df
DM
/* Implementation detail of ASSERT_DUMP_EQ.
   Dump MODEL (optionally SUMMARIZE'd) and compare against EXPECTED.  */

static void
assert_dump_eq (const location &loc,
                const region_model &model,
                bool summarize,
                const char *expected)
{
  /* Fix up quote characters so the expected strings are predictable.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT
4405
/* Smoketest for region_model::dump_to_pp.
   An empty model should dump identically in both full and summarized
   form.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  ASSERT_DUMP_EQ (model, false,
                  "stack depth: 0\n"
                  "m_called_unknown_fn: FALSE\n"
                  "constraint_manager:\n"
                  "  equiv classes:\n"
                  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
                  "stack depth: 0\n"
                  "m_called_unknown_fn: FALSE\n"
                  "constraint_manager:\n"
                  "  equiv classes:\n"
                  "  constraints:\n");
}
4427
884d9141
DM
/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
                         const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      /* Each field is chained in front; nreverse below restores the
         declaration order.  */
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  layout_type (t);
  return t;
}
4456
a96f1c38
DM
/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  /* The two FIELD_DECLs, and the RECORD_TYPE containing them.  */
  tree m_x_field;
  tree m_y_field;
  tree m_coord_type;
};
4477
/* Verify usage of a struct: setting fields, and querying their offsets
   within the containing decl.  */

static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y"; it follows "x", hence is one int in.  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}
4515
/* Verify usage of an array element.
   Smoketest: merely checks that writing to a[0] doesn't crash;
   there are no explicit assertions.  */

static void
test_array_1 ()
{
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
                     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);
}
4534
90f7c300
DM
/* Verify that region_model::get_representative_tree works as expected,
   for string constants, array elements and struct fields.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    /* A placeholder stands in for an arbitrary stored value.  */
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
                         a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
                         a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
4627
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
             model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
4659
808f4dfe
DM
/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
4685
/* Verify that initial_svalue are handled as expected.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  /* Distinct globals must have distinct initial values...  */
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  /* ...and the rvalue of an untouched global is its initial value.  */
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

}
757bf1df 4704
/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
                                      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
                                      x_init, y_init));
}
4742
4743/* Verify that binops on constant svalues are folded. */
757bf1df 4744
808f4dfe
DM
4745static void
4746test_binop_svalue_folding ()
4747{
4748#define NUM_CSTS 10
4749 tree cst_int[NUM_CSTS];
4750 region_model_manager mgr;
4751 const svalue *cst_sval[NUM_CSTS];
4752 for (int i = 0; i < NUM_CSTS; i++)
4753 {
4754 cst_int[i] = build_int_cst (integer_type_node, i);
4755 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
4756 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
4757 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
4758 }
757bf1df 4759
808f4dfe
DM
4760 for (int i = 0; i < NUM_CSTS; i++)
4761 for (int j = 0; j < NUM_CSTS; j++)
4762 {
4763 if (i != j)
4764 ASSERT_NE (cst_sval[i], cst_sval[j]);
4765 if (i + j < NUM_CSTS)
4766 {
4767 const svalue *sum
4768 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4769 cst_sval[i], cst_sval[j]);
4770 ASSERT_EQ (sum, cst_sval[i + j]);
4771 }
4772 if (i - j >= 0)
4773 {
4774 const svalue *difference
4775 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
4776 cst_sval[i], cst_sval[j]);
4777 ASSERT_EQ (difference, cst_sval[i - j]);
4778 }
4779 if (i * j < NUM_CSTS)
4780 {
4781 const svalue *product
4782 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4783 cst_sval[i], cst_sval[j]);
4784 ASSERT_EQ (product, cst_sval[i * j]);
4785 }
4786 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
4787 cst_sval[i], cst_sval[j]);
4788 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
4789 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
4790 cst_sval[i], cst_sval[j]);
4791 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
4792 // etc
4793 }
757bf1df 4794
808f4dfe 4795 tree x = build_global_decl ("x", integer_type_node);
757bf1df 4796
808f4dfe
DM
4797 test_region_model_context ctxt;
4798 region_model model (&mgr);
4799 const svalue *x_init = model.get_rvalue (x, &ctxt);
4800
4801 /* PLUS_EXPR folding. */
4802 const svalue *x_init_plus_zero
4803 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4804 x_init, cst_sval[0]);
4805 ASSERT_EQ (x_init_plus_zero, x_init);
4806 const svalue *zero_plus_x_init
4807 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4808 cst_sval[0], x_init);
4809 ASSERT_EQ (zero_plus_x_init, x_init);
4810
4811 /* MULT_EXPR folding. */
4812 const svalue *x_init_times_zero
4813 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4814 x_init, cst_sval[0]);
4815 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
4816 const svalue *zero_times_x_init
4817 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4818 cst_sval[0], x_init);
4819 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
4820
4821 const svalue *x_init_times_one
4822 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4823 x_init, cst_sval[1]);
4824 ASSERT_EQ (x_init_times_one, x_init);
4825 const svalue *one_times_x_init
4826 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4827 cst_sval[1], x_init);
4828 ASSERT_EQ (one_times_x_init, x_init);
4829
4830 // etc
4831 // TODO: do we want to use the match-and-simplify DSL for this?
4832
4833 /* Verify that binops put any constants on the RHS. */
4834 const svalue *four_times_x_init
4835 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4836 cst_sval[4], x_init);
4837 const svalue *x_init_times_four
4838 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4839 x_init, cst_sval[4]);
4840 ASSERT_EQ (four_times_x_init, x_init_times_four);
4841 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
4842 ASSERT_EQ (binop->get_op (), MULT_EXPR);
4843 ASSERT_EQ (binop->get_arg0 (), x_init);
4844 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
4845
4846 /* Verify that ((x + 1) + 1) == (x + 2). */
4847 const svalue *x_init_plus_one
4848 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4849 x_init, cst_sval[1]);
4850 const svalue *x_init_plus_two
4851 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4852 x_init, cst_sval[2]);
4853 const svalue *x_init_plus_one_plus_one
4854 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4855 x_init_plus_one, cst_sval[1]);
4856 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
4f34f8cc
DM
4857
4858 /* Verify various binops on booleans. */
4859 {
4860 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
4861 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
4862 const svalue *sval_unknown
4863 = mgr.get_or_create_unknown_svalue (boolean_type_node);
4864 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
4865 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
4866 {
4867 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4868 sval_true, sval_unknown),
4869 sval_true);
4870 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4871 sval_false, sval_unknown),
4872 sval_unknown);
4873 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4874 sval_false, &sval_placeholder),
4875 &sval_placeholder);
4876 }
4877 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
4878 {
4879 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4880 sval_false, sval_unknown),
4881 sval_false);
4882 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4883 sval_true, sval_unknown),
4884 sval_unknown);
4885 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4886 sval_true, &sval_placeholder),
4887 &sval_placeholder);
4888 }
4889 }
808f4dfe
DM
4890}
4891
4892/* Verify that sub_svalues are folded as expected. */
757bf1df 4893
808f4dfe
DM
4894static void
4895test_sub_svalue_folding ()
4896{
4897 coord_test ct;
4898 tree c = build_global_decl ("c", ct.m_coord_type);
4899 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4900 c, ct.m_x_field, NULL_TREE);
757bf1df 4901
808f4dfe
DM
4902 region_model_manager mgr;
4903 region_model model (&mgr);
4904 test_region_model_context ctxt;
4905 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 4906
808f4dfe
DM
4907 /* Verify that sub_svalue of "unknown" simply
4908 yields an unknown. */
757bf1df 4909
808f4dfe
DM
4910 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
4911 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
4912 unknown, c_x_reg);
4913 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
4914 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
757bf1df
DM
4915}
4916
808f4dfe 4917/* Test that region::descendent_of_p works as expected. */
757bf1df
DM
4918
4919static void
808f4dfe 4920test_descendent_of_p ()
757bf1df 4921{
808f4dfe
DM
4922 region_model_manager mgr;
4923 const region *stack = mgr.get_stack_region ();
4924 const region *heap = mgr.get_heap_region ();
4925 const region *code = mgr.get_code_region ();
4926 const region *globals = mgr.get_globals_region ();
757bf1df 4927
808f4dfe
DM
4928 /* descendent_of_p should return true when used on the region itself. */
4929 ASSERT_TRUE (stack->descendent_of_p (stack));
4930 ASSERT_FALSE (stack->descendent_of_p (heap));
4931 ASSERT_FALSE (stack->descendent_of_p (code));
4932 ASSERT_FALSE (stack->descendent_of_p (globals));
757bf1df 4933
808f4dfe
DM
4934 tree x = build_global_decl ("x", integer_type_node);
4935 const region *x_reg = mgr.get_region_for_global (x);
4936 ASSERT_TRUE (x_reg->descendent_of_p (globals));
757bf1df 4937
808f4dfe
DM
4938 /* A cast_region should be a descendent of the original region. */
4939 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
4940 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
757bf1df
DM
4941}
4942
391512ad
DM
4943/* Verify that bit_range_region works as expected. */
4944
4945static void
4946test_bit_range_regions ()
4947{
4948 tree x = build_global_decl ("x", integer_type_node);
4949 region_model_manager mgr;
4950 const region *x_reg = mgr.get_region_for_global (x);
4951 const region *byte0
4952 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
4953 const region *byte1
4954 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
4955 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
4956 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
4957 ASSERT_NE (byte0, byte1);
4958}
4959
757bf1df
DM
4960/* Verify that simple assignments work as expected. */
4961
4962static void
4963test_assignment ()
4964{
4965 tree int_0 = build_int_cst (integer_type_node, 0);
4966 tree x = build_global_decl ("x", integer_type_node);
4967 tree y = build_global_decl ("y", integer_type_node);
4968
4969 /* "x == 0", then use of y, then "y = 0;". */
808f4dfe
DM
4970 region_model_manager mgr;
4971 region_model model (&mgr);
757bf1df
DM
4972 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
4973 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
4974 model.set_value (model.get_lvalue (y, NULL),
4975 model.get_rvalue (int_0, NULL),
4976 NULL);
4977 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
4978 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
757bf1df
DM
4979}
4980
a96f1c38
DM
4981/* Verify that compound assignments work as expected. */
4982
4983static void
4984test_compound_assignment ()
4985{
4986 coord_test ct;
4987
4988 tree c = build_global_decl ("c", ct.m_coord_type);
4989 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4990 c, ct.m_x_field, NULL_TREE);
4991 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4992 c, ct.m_y_field, NULL_TREE);
4993 tree d = build_global_decl ("d", ct.m_coord_type);
4994 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4995 d, ct.m_x_field, NULL_TREE);
4996 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4997 d, ct.m_y_field, NULL_TREE);
4998
4999 tree int_17 = build_int_cst (integer_type_node, 17);
5000 tree int_m3 = build_int_cst (integer_type_node, -3);
5001
808f4dfe
DM
5002 region_model_manager mgr;
5003 region_model model (&mgr);
a96f1c38
DM
5004 model.set_value (c_x, int_17, NULL);
5005 model.set_value (c_y, int_m3, NULL);
5006
a96f1c38 5007 /* Copy c to d. */
13ad6d9f
DM
5008 const svalue *sval = model.get_rvalue (c, NULL);
5009 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
5010
a96f1c38
DM
5011 /* Check that the fields have the same svalues. */
5012 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
5013 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
5014}
5015
757bf1df
DM
5016/* Verify the details of pushing and popping stack frames. */
5017
5018static void
5019test_stack_frames ()
5020{
5021 tree int_42 = build_int_cst (integer_type_node, 42);
5022 tree int_10 = build_int_cst (integer_type_node, 10);
5023 tree int_5 = build_int_cst (integer_type_node, 5);
5024 tree int_0 = build_int_cst (integer_type_node, 0);
5025
5026 auto_vec <tree> param_types;
5027 tree parent_fndecl = make_fndecl (integer_type_node,
5028 "parent_fn",
5029 param_types);
5030 allocate_struct_function (parent_fndecl, true);
5031
5032 tree child_fndecl = make_fndecl (integer_type_node,
5033 "child_fn",
5034 param_types);
5035 allocate_struct_function (child_fndecl, true);
5036
5037 /* "a" and "b" in the parent frame. */
5038 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5039 get_identifier ("a"),
5040 integer_type_node);
5041 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5042 get_identifier ("b"),
5043 integer_type_node);
5044 /* "x" and "y" in a child frame. */
5045 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5046 get_identifier ("x"),
5047 integer_type_node);
5048 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5049 get_identifier ("y"),
5050 integer_type_node);
5051
5052 /* "p" global. */
5053 tree p = build_global_decl ("p", ptr_type_node);
5054
5055 /* "q" global. */
5056 tree q = build_global_decl ("q", ptr_type_node);
5057
808f4dfe 5058 region_model_manager mgr;
757bf1df 5059 test_region_model_context ctxt;
808f4dfe 5060 region_model model (&mgr);
757bf1df
DM
5061
5062 /* Push stack frame for "parent_fn". */
808f4dfe
DM
5063 const region *parent_frame_reg
5064 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
5065 NULL, &ctxt);
5066 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
5067 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
5068 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
5069 model.set_value (a_in_parent_reg,
5070 model.get_rvalue (int_42, &ctxt),
5071 &ctxt);
5072 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
5073
757bf1df
DM
5074 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
5075 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
5076 tristate (tristate::TS_TRUE));
5077
5078 /* Push stack frame for "child_fn". */
808f4dfe 5079 const region *child_frame_reg
757bf1df 5080 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
5081 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
5082 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
5083 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
5084 model.set_value (x_in_child_reg,
5085 model.get_rvalue (int_0, &ctxt),
5086 &ctxt);
5087 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
5088
757bf1df
DM
5089 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
5090 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
5091 tristate (tristate::TS_TRUE));
5092
5093 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
5094 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
5095 model.set_value (p_in_globals_reg,
5096 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 5097 &ctxt);
808f4dfe 5098 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
5099
5100 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
5101 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
5102 model.set_value (q_in_globals_reg,
5103 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
5104 &ctxt);
5105
808f4dfe
DM
5106 /* Test region::descendent_of_p. */
5107 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
5108 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
5109 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
5110
5111 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
5112 model.pop_frame (NULL, NULL, &ctxt);
5113 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
5114 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
5115
5116 /* Verify that p (which was pointing at the local "x" in the popped
5117 frame) has been poisoned. */
33255ad3 5118 const svalue *new_p_sval = model.get_rvalue (p, NULL);
757bf1df
DM
5119 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
5120 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
5121 POISON_KIND_POPPED_STACK);
5122
5123 /* Verify that q still points to p, in spite of the region
5124 renumbering. */
808f4dfe 5125 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 5126 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 5127 ASSERT_EQ (new_q_sval->maybe_get_region (),
757bf1df
DM
5128 model.get_lvalue (p, &ctxt));
5129
5130 /* Verify that top of stack has been updated. */
808f4dfe 5131 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
5132
5133 /* Verify locals in parent frame. */
5134 /* Verify "a" still has its value. */
808f4dfe 5135 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
5136 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
5137 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
5138 int_42);
5139 /* Verify "b" still has its constraint. */
5140 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
5141 tristate (tristate::TS_TRUE));
5142}
5143
5144/* Verify that get_representative_path_var works as expected, that
808f4dfe 5145 we can map from regions to parms and back within a recursive call
757bf1df
DM
5146 stack. */
5147
5148static void
5149test_get_representative_path_var ()
5150{
5151 auto_vec <tree> param_types;
5152 tree fndecl = make_fndecl (integer_type_node,
5153 "factorial",
5154 param_types);
5155 allocate_struct_function (fndecl, true);
5156
5157 /* Parm "n". */
5158 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5159 get_identifier ("n"),
5160 integer_type_node);
5161
808f4dfe
DM
5162 region_model_manager mgr;
5163 test_region_model_context ctxt;
5164 region_model model (&mgr);
757bf1df
DM
5165
5166 /* Push 5 stack frames for "factorial", each with a param */
808f4dfe
DM
5167 auto_vec<const region *> parm_regs;
5168 auto_vec<const svalue *> parm_svals;
757bf1df
DM
5169 for (int depth = 0; depth < 5; depth++)
5170 {
808f4dfe
DM
5171 const region *frame_n_reg
5172 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
5173 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
5174 parm_regs.safe_push (parm_n_reg);
757bf1df 5175
808f4dfe
DM
5176 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
5177 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
5178 parm_svals.safe_push (sval_n);
757bf1df
DM
5179 }
5180
5181 /* Verify that we can recognize that the regions are the parms,
5182 at every depth. */
5183 for (int depth = 0; depth < 5; depth++)
5184 {
808f4dfe
DM
5185 {
5186 svalue_set visited;
5187 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
5188 &visited),
5189 path_var (n, depth + 1));
5190 }
757bf1df
DM
5191 /* ...and that we can lookup lvalues for locals for all frames,
5192 not just the top. */
5193 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
808f4dfe 5194 parm_regs[depth]);
757bf1df 5195 /* ...and that we can locate the svalues. */
808f4dfe
DM
5196 {
5197 svalue_set visited;
5198 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
5199 &visited),
5200 path_var (n, depth + 1));
5201 }
757bf1df
DM
5202 }
5203}
5204
808f4dfe 5205/* Ensure that region_model::operator== works as expected. */
757bf1df
DM
5206
5207static void
808f4dfe 5208test_equality_1 ()
757bf1df 5209{
808f4dfe
DM
5210 tree int_42 = build_int_cst (integer_type_node, 42);
5211 tree int_17 = build_int_cst (integer_type_node, 17);
757bf1df 5212
808f4dfe
DM
5213/* Verify that "empty" region_model instances are equal to each other. */
5214 region_model_manager mgr;
5215 region_model model0 (&mgr);
5216 region_model model1 (&mgr);
757bf1df 5217 ASSERT_EQ (model0, model1);
808f4dfe
DM
5218
5219 /* Verify that setting state in model1 makes the models non-equal. */
5220 tree x = build_global_decl ("x", integer_type_node);
5221 model0.set_value (x, int_42, NULL);
5222 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
5223 ASSERT_NE (model0, model1);
5224
5225 /* Verify the copy-ctor. */
5226 region_model model2 (model0);
5227 ASSERT_EQ (model0, model2);
5228 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
5229 ASSERT_NE (model1, model2);
5230
5231 /* Verify that models obtained from copy-ctor are independently editable
5232 w/o affecting the original model. */
5233 model2.set_value (x, int_17, NULL);
5234 ASSERT_NE (model0, model2);
5235 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
5236 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
757bf1df
DM
5237}
5238
5239/* Verify that region models for
5240 x = 42; y = 113;
5241 and
5242 y = 113; x = 42;
808f4dfe 5243 are equal. */
757bf1df
DM
5244
5245static void
5246test_canonicalization_2 ()
5247{
5248 tree int_42 = build_int_cst (integer_type_node, 42);
5249 tree int_113 = build_int_cst (integer_type_node, 113);
5250 tree x = build_global_decl ("x", integer_type_node);
5251 tree y = build_global_decl ("y", integer_type_node);
5252
808f4dfe
DM
5253 region_model_manager mgr;
5254 region_model model0 (&mgr);
757bf1df
DM
5255 model0.set_value (model0.get_lvalue (x, NULL),
5256 model0.get_rvalue (int_42, NULL),
5257 NULL);
5258 model0.set_value (model0.get_lvalue (y, NULL),
5259 model0.get_rvalue (int_113, NULL),
5260 NULL);
5261
808f4dfe 5262 region_model model1 (&mgr);
757bf1df
DM
5263 model1.set_value (model1.get_lvalue (y, NULL),
5264 model1.get_rvalue (int_113, NULL),
5265 NULL);
5266 model1.set_value (model1.get_lvalue (x, NULL),
5267 model1.get_rvalue (int_42, NULL),
5268 NULL);
5269
757bf1df
DM
5270 ASSERT_EQ (model0, model1);
5271}
5272
5273/* Verify that constraints for
5274 x > 3 && y > 42
5275 and
5276 y > 42 && x > 3
5277 are equal after canonicalization. */
5278
5279static void
5280test_canonicalization_3 ()
5281{
5282 tree int_3 = build_int_cst (integer_type_node, 3);
5283 tree int_42 = build_int_cst (integer_type_node, 42);
5284 tree x = build_global_decl ("x", integer_type_node);
5285 tree y = build_global_decl ("y", integer_type_node);
5286
808f4dfe
DM
5287 region_model_manager mgr;
5288 region_model model0 (&mgr);
757bf1df
DM
5289 model0.add_constraint (x, GT_EXPR, int_3, NULL);
5290 model0.add_constraint (y, GT_EXPR, int_42, NULL);
5291
808f4dfe 5292 region_model model1 (&mgr);
757bf1df
DM
5293 model1.add_constraint (y, GT_EXPR, int_42, NULL);
5294 model1.add_constraint (x, GT_EXPR, int_3, NULL);
5295
808f4dfe
DM
5296 model0.canonicalize ();
5297 model1.canonicalize ();
757bf1df
DM
5298 ASSERT_EQ (model0, model1);
5299}
5300
8c08c983
DM
5301/* Verify that we can canonicalize a model containing NaN and other real
5302 constants. */
5303
5304static void
5305test_canonicalization_4 ()
5306{
5307 auto_vec<tree> csts;
5308 append_interesting_constants (&csts);
5309
808f4dfe
DM
5310 region_model_manager mgr;
5311 region_model model (&mgr);
8c08c983 5312
3f207ab3 5313 for (tree cst : csts)
8c08c983
DM
5314 model.get_rvalue (cst, NULL);
5315
808f4dfe 5316 model.canonicalize ();
8c08c983
DM
5317}
5318
757bf1df
DM
5319/* Assert that if we have two region_model instances
5320 with values VAL_A and VAL_B for EXPR that they are
5321 mergable. Write the merged model to *OUT_MERGED_MODEL,
5322 and the merged svalue ptr to *OUT_MERGED_SVALUE.
5323 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
5324 for that region_model. */
5325
5326static void
5327assert_region_models_merge (tree expr, tree val_a, tree val_b,
808f4dfe
DM
5328 region_model *out_merged_model,
5329 const svalue **out_merged_svalue)
757bf1df 5330{
808f4dfe 5331 program_point point (program_point::origin ());
757bf1df 5332 test_region_model_context ctxt;
808f4dfe
DM
5333 region_model_manager *mgr = out_merged_model->get_manager ();
5334 region_model model0 (mgr);
5335 region_model model1 (mgr);
757bf1df
DM
5336 if (val_a)
5337 model0.set_value (model0.get_lvalue (expr, &ctxt),
5338 model0.get_rvalue (val_a, &ctxt),
5339 &ctxt);
5340 if (val_b)
5341 model1.set_value (model1.get_lvalue (expr, &ctxt),
5342 model1.get_rvalue (val_b, &ctxt),
5343 &ctxt);
5344
5345 /* They should be mergeable. */
808f4dfe
DM
5346 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
5347 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
757bf1df
DM
5348}
5349
5350/* Verify that we can merge region_model instances. */
5351
5352static void
5353test_state_merging ()
5354{
5355 tree int_42 = build_int_cst (integer_type_node, 42);
5356 tree int_113 = build_int_cst (integer_type_node, 113);
5357 tree x = build_global_decl ("x", integer_type_node);
5358 tree y = build_global_decl ("y", integer_type_node);
5359 tree z = build_global_decl ("z", integer_type_node);
5360 tree p = build_global_decl ("p", ptr_type_node);
5361
5362 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
5363 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
5364
5365 auto_vec <tree> param_types;
5366 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
5367 allocate_struct_function (test_fndecl, true);
5368
5369 /* Param "a". */
5370 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5371 get_identifier ("a"),
5372 integer_type_node);
5373 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
5374
455f58ec
DM
5375 /* Param "q", a pointer. */
5376 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5377 get_identifier ("q"),
5378 ptr_type_node);
5379
808f4dfe
DM
5380 program_point point (program_point::origin ());
5381 region_model_manager mgr;
5382
757bf1df 5383 {
808f4dfe
DM
5384 region_model model0 (&mgr);
5385 region_model model1 (&mgr);
5386 region_model merged (&mgr);
757bf1df 5387 /* Verify empty models can be merged. */
808f4dfe 5388 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5389 ASSERT_EQ (model0, merged);
5390 }
5391
5392 /* Verify that we can merge two contradictory constraints on the
5393 value for a global. */
5394 /* TODO: verify that the merged model doesn't have a value for
5395 the global */
5396 {
808f4dfe
DM
5397 region_model model0 (&mgr);
5398 region_model model1 (&mgr);
5399 region_model merged (&mgr);
757bf1df
DM
5400 test_region_model_context ctxt;
5401 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5402 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe 5403 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5404 ASSERT_NE (model0, merged);
5405 ASSERT_NE (model1, merged);
5406 }
5407
5408 /* Verify handling of a PARM_DECL. */
5409 {
5410 test_region_model_context ctxt;
808f4dfe
DM
5411 region_model model0 (&mgr);
5412 region_model model1 (&mgr);
757bf1df
DM
5413 ASSERT_EQ (model0.get_stack_depth (), 0);
5414 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5415 ASSERT_EQ (model0.get_stack_depth (), 1);
757bf1df
DM
5416 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5417
808f4dfe
DM
5418 placeholder_svalue test_sval (integer_type_node, "test sval");
5419 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
5420 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
757bf1df
DM
5421 ASSERT_EQ (model0, model1);
5422
757bf1df 5423 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5424 region_model merged (&mgr);
5425 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5426 ASSERT_EQ (model0, merged);
808f4dfe
DM
5427 /* In particular, "a" should have the placeholder value. */
5428 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
757bf1df
DM
5429 }
5430
5431 /* Verify handling of a global. */
5432 {
5433 test_region_model_context ctxt;
808f4dfe
DM
5434 region_model model0 (&mgr);
5435 region_model model1 (&mgr);
757bf1df 5436
808f4dfe
DM
5437 placeholder_svalue test_sval (integer_type_node, "test sval");
5438 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5439 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5440 ASSERT_EQ (model0, model1);
757bf1df
DM
5441
5442 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5443 region_model merged (&mgr);
5444 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5445 ASSERT_EQ (model0, merged);
808f4dfe
DM
5446 /* In particular, "x" should have the placeholder value. */
5447 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
757bf1df
DM
5448 }
5449
5450 /* Use global-handling to verify various combinations of values. */
5451
5452 /* Two equal constant values. */
5453 {
808f4dfe
DM
5454 region_model merged (&mgr);
5455 const svalue *merged_x_sval;
757bf1df
DM
5456 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
5457
5458 /* In particular, there should be a constant value for "x". */
5459 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
5460 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
5461 int_42);
5462 }
5463
5464 /* Two non-equal constant values. */
5465 {
808f4dfe
DM
5466 region_model merged (&mgr);
5467 const svalue *merged_x_sval;
757bf1df
DM
5468 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
5469
808f4dfe
DM
5470 /* In particular, there should be a "widening" value for "x". */
5471 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
757bf1df
DM
5472 }
5473
808f4dfe 5474 /* Initial and constant. */
757bf1df 5475 {
808f4dfe
DM
5476 region_model merged (&mgr);
5477 const svalue *merged_x_sval;
757bf1df
DM
5478 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
5479
5480 /* In particular, there should be an unknown value for "x". */
5481 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5482 }
5483
808f4dfe 5484 /* Constant and initial. */
757bf1df 5485 {
808f4dfe
DM
5486 region_model merged (&mgr);
5487 const svalue *merged_x_sval;
757bf1df
DM
5488 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
5489
5490 /* In particular, there should be an unknown value for "x". */
5491 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5492 }
5493
5494 /* Unknown and constant. */
5495 // TODO
5496
5497 /* Pointers: NULL and NULL. */
5498 // TODO
5499
5500 /* Pointers: NULL and non-NULL. */
5501 // TODO
5502
5503 /* Pointers: non-NULL and non-NULL: ptr to a local. */
5504 {
808f4dfe 5505 region_model model0 (&mgr);
757bf1df 5506 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
757bf1df
DM
5507 model0.set_value (model0.get_lvalue (p, NULL),
5508 model0.get_rvalue (addr_of_a, NULL), NULL);
5509
5510 region_model model1 (model0);
5511 ASSERT_EQ (model0, model1);
5512
5513 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5514 region_model merged (&mgr);
5515 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5516 ASSERT_EQ (model0, merged);
5517 }
5518
5519 /* Pointers: non-NULL and non-NULL: ptr to a global. */
5520 {
808f4dfe 5521 region_model merged (&mgr);
757bf1df 5522 /* p == &y in both input models. */
808f4dfe 5523 const svalue *merged_p_sval;
757bf1df
DM
5524 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
5525 &merged_p_sval);
5526
5527 /* We should get p == &y in the merged model. */
5528 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
808f4dfe
DM
5529 const region_svalue *merged_p_ptr
5530 = merged_p_sval->dyn_cast_region_svalue ();
5531 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
5532 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
757bf1df
DM
5533 }
5534
5535 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
5536 {
808f4dfe
DM
5537 region_model merged (&mgr);
5538 /* x == &y vs x == &z in the input models; these are actually casts
5539 of the ptrs to "int". */
5540 const svalue *merged_x_sval;
5541 // TODO:
757bf1df
DM
5542 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
5543 &merged_x_sval);
5544
5545 /* We should get x == unknown in the merged model. */
5546 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5547 }
5548
5549 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
5550 {
5551 test_region_model_context ctxt;
808f4dfe 5552 region_model model0 (&mgr);
9a2c9579 5553 tree size = build_int_cst (size_type_node, 1024);
808f4dfe 5554 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
b9365b93
DM
5555 const region *new_reg
5556 = model0.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe 5557 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
757bf1df 5558 model0.set_value (model0.get_lvalue (p, &ctxt),
808f4dfe 5559 ptr_sval, &ctxt);
757bf1df
DM
5560
5561 region_model model1 (model0);
5562
5563 ASSERT_EQ (model0, model1);
5564
808f4dfe
DM
5565 region_model merged (&mgr);
5566 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5567
808f4dfe 5568 /* The merged model ought to be identical. */
757bf1df
DM
5569 ASSERT_EQ (model0, merged);
5570 }
5571
808f4dfe
DM
5572 /* Two regions sharing the same placeholder svalue should continue sharing
5573 it after self-merger. */
757bf1df
DM
5574 {
5575 test_region_model_context ctxt;
808f4dfe
DM
5576 region_model model0 (&mgr);
5577 placeholder_svalue placeholder_sval (integer_type_node, "test");
5578 model0.set_value (model0.get_lvalue (x, &ctxt),
5579 &placeholder_sval, &ctxt);
5580 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
757bf1df
DM
5581 region_model model1 (model0);
5582
5583 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5584 region_model merged (&mgr);
5585 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5586 ASSERT_EQ (model0, merged);
5587
5588 /* In particular, we should have x == y. */
5589 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
5590 tristate (tristate::TS_TRUE));
5591 }
5592
757bf1df 5593 {
808f4dfe
DM
5594 region_model model0 (&mgr);
5595 region_model model1 (&mgr);
757bf1df
DM
5596 test_region_model_context ctxt;
5597 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5598 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
808f4dfe
DM
5599 region_model merged (&mgr);
5600 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5601 }
5602
5603 {
808f4dfe
DM
5604 region_model model0 (&mgr);
5605 region_model model1 (&mgr);
757bf1df
DM
5606 test_region_model_context ctxt;
5607 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5608 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5609 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe
DM
5610 region_model merged (&mgr);
5611 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5612 }
757bf1df
DM
5613
5614 // TODO: what can't we merge? need at least one such test
5615
5616 /* TODO: various things
5617 - heap regions
5618 - value merging:
5619 - every combination, but in particular
808f4dfe 5620 - pairs of regions
757bf1df
DM
5621 */
5622
5623 /* Views. */
5624 {
5625 test_region_model_context ctxt;
808f4dfe 5626 region_model model0 (&mgr);
757bf1df 5627
808f4dfe
DM
5628 const region *x_reg = model0.get_lvalue (x, &ctxt);
5629 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
757bf1df
DM
5630 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
5631
5632 region_model model1 (model0);
5633 ASSERT_EQ (model1, model0);
5634
5635 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5636 region_model merged (&mgr);
5637 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5638 }
455f58ec
DM
5639
5640 /* Verify that we can merge a model in which a local in an older stack
5641 frame points to a local in a more recent stack frame. */
5642 {
808f4dfe 5643 region_model model0 (&mgr);
455f58ec 5644 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
808f4dfe 5645 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
455f58ec
DM
5646
5647 /* Push a second frame. */
808f4dfe 5648 const region *reg_2nd_frame
455f58ec
DM
5649 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5650
5651 /* Have a pointer in the older frame point to a local in the
5652 more recent frame. */
808f4dfe
DM
5653 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
5654 model0.set_value (q_in_first_frame, sval_ptr, NULL);
455f58ec
DM
5655
5656 /* Verify that it's pointing at the newer frame. */
5932dd35 5657 const region *reg_pointee = sval_ptr->maybe_get_region ();
808f4dfe 5658 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
455f58ec 5659
808f4dfe 5660 model0.canonicalize ();
455f58ec
DM
5661
5662 region_model model1 (model0);
5663 ASSERT_EQ (model0, model1);
5664
5665 /* They should be mergeable, and the result should be the same
5666 (after canonicalization, at least). */
808f4dfe
DM
5667 region_model merged (&mgr);
5668 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5669 merged.canonicalize ();
455f58ec
DM
5670 ASSERT_EQ (model0, merged);
5671 }
5672
5673 /* Verify that we can merge a model in which a local points to a global. */
5674 {
808f4dfe 5675 region_model model0 (&mgr);
455f58ec
DM
5676 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5677 model0.set_value (model0.get_lvalue (q, NULL),
5678 model0.get_rvalue (addr_of_y, NULL), NULL);
5679
455f58ec
DM
5680 region_model model1 (model0);
5681 ASSERT_EQ (model0, model1);
5682
5683 /* They should be mergeable, and the result should be the same
5684 (after canonicalization, at least). */
808f4dfe
DM
5685 region_model merged (&mgr);
5686 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
455f58ec
DM
5687 ASSERT_EQ (model0, merged);
5688 }
757bf1df
DM
5689}
5690
5691/* Verify that constraints are correctly merged when merging region_model
5692 instances. */
5693
5694static void
5695test_constraint_merging ()
5696{
5697 tree int_0 = build_int_cst (integer_type_node, 0);
5698 tree int_5 = build_int_cst (integer_type_node, 5);
5699 tree x = build_global_decl ("x", integer_type_node);
5700 tree y = build_global_decl ("y", integer_type_node);
5701 tree z = build_global_decl ("z", integer_type_node);
5702 tree n = build_global_decl ("n", integer_type_node);
5703
808f4dfe 5704 region_model_manager mgr;
757bf1df
DM
5705 test_region_model_context ctxt;
5706
5707 /* model0: 0 <= (x == y) < n. */
808f4dfe 5708 region_model model0 (&mgr);
757bf1df
DM
5709 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
5710 model0.add_constraint (x, GE_EXPR, int_0, NULL);
5711 model0.add_constraint (x, LT_EXPR, n, NULL);
5712
5713 /* model1: z != 5 && (0 <= x < n). */
808f4dfe 5714 region_model model1 (&mgr);
757bf1df
DM
5715 model1.add_constraint (z, NE_EXPR, int_5, NULL);
5716 model1.add_constraint (x, GE_EXPR, int_0, NULL);
5717 model1.add_constraint (x, LT_EXPR, n, NULL);
5718
5719 /* They should be mergeable; the merged constraints should
5720 be: (0 <= x < n). */
808f4dfe
DM
5721 program_point point (program_point::origin ());
5722 region_model merged (&mgr);
5723 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5724
5725 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
5726 tristate (tristate::TS_TRUE));
5727 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
5728 tristate (tristate::TS_TRUE));
5729
5730 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
5731 tristate (tristate::TS_UNKNOWN));
5732 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
5733 tristate (tristate::TS_UNKNOWN));
5734}
5735
808f4dfe
DM
5736/* Verify that widening_svalue::eval_condition_without_cm works as
5737 expected. */
5738
5739static void
5740test_widening_constraints ()
5741{
5742 program_point point (program_point::origin ());
5743 tree int_0 = build_int_cst (integer_type_node, 0);
5744 tree int_m1 = build_int_cst (integer_type_node, -1);
5745 tree int_1 = build_int_cst (integer_type_node, 1);
5746 tree int_256 = build_int_cst (integer_type_node, 256);
5747 region_model_manager mgr;
5748 test_region_model_context ctxt;
5749 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
5750 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
5751 const svalue *w_zero_then_one_sval
5752 = mgr.get_or_create_widening_svalue (integer_type_node, point,
5753 int_0_sval, int_1_sval);
5754 const widening_svalue *w_zero_then_one
5755 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
5756 ASSERT_EQ (w_zero_then_one->get_direction (),
5757 widening_svalue::DIR_ASCENDING);
5758 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
5759 tristate::TS_FALSE);
5760 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
5761 tristate::TS_FALSE);
5762 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
5763 tristate::TS_UNKNOWN);
5764 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
5765 tristate::TS_UNKNOWN);
5766
5767 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
5768 tristate::TS_FALSE);
5769 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
5770 tristate::TS_UNKNOWN);
5771 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
5772 tristate::TS_UNKNOWN);
5773 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
5774 tristate::TS_UNKNOWN);
5775
5776 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
5777 tristate::TS_TRUE);
5778 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
5779 tristate::TS_UNKNOWN);
5780 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
5781 tristate::TS_UNKNOWN);
5782 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
5783 tristate::TS_UNKNOWN);
5784
5785 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
5786 tristate::TS_TRUE);
5787 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
5788 tristate::TS_TRUE);
5789 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
5790 tristate::TS_UNKNOWN);
5791 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
5792 tristate::TS_UNKNOWN);
5793
5794 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
5795 tristate::TS_FALSE);
5796 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
5797 tristate::TS_UNKNOWN);
5798 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
5799 tristate::TS_UNKNOWN);
5800 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
5801 tristate::TS_UNKNOWN);
5802
5803 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
5804 tristate::TS_TRUE);
5805 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
5806 tristate::TS_UNKNOWN);
5807 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
5808 tristate::TS_UNKNOWN);
5809 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
5810 tristate::TS_UNKNOWN);
5811}
5812
5813/* Verify merging constraints for states simulating successive iterations
5814 of a loop.
5815 Simulate:
5816 for (i = 0; i < 256; i++)
5817 [...body...]
5818 i.e. this gimple:.
5819 i_15 = 0;
5820 goto <bb 4>;
5821
5822 <bb 4> :
5823 i_11 = PHI <i_15(2), i_23(3)>
5824 if (i_11 <= 255)
5825 goto <bb 3>;
5826 else
5827 goto [AFTER LOOP]
5828
5829 <bb 3> :
5830 [LOOP BODY]
5831 i_23 = i_11 + 1;
5832
5833 and thus these ops (and resultant states):
5834 i_11 = PHI()
5835 {i_11: 0}
5836 add_constraint (i_11 <= 255) [for the true edge]
5837 {i_11: 0} [constraint was a no-op]
5838 i_23 = i_11 + 1;
5839 {i_22: 1}
5840 i_11 = PHI()
5841 {i_11: WIDENED (at phi, 0, 1)}
5842 add_constraint (i_11 <= 255) [for the true edge]
5843 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
5844 i_23 = i_11 + 1;
5845 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
5846 i_11 = PHI(); merge with state at phi above
5847 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
5848 [changing meaning of "WIDENED" here]
5849 if (i_11 <= 255)
5850 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
5851 F: {i_11: 256}
5852 */
5853
5854static void
5855test_iteration_1 ()
5856{
5857 program_point point (program_point::origin ());
5858
5859 tree int_0 = build_int_cst (integer_type_node, 0);
5860 tree int_1 = build_int_cst (integer_type_node, 1);
5861 tree int_256 = build_int_cst (integer_type_node, 256);
5862 tree int_257 = build_int_cst (integer_type_node, 257);
5863 tree i = build_global_decl ("i", integer_type_node);
5864
5865 region_model_manager mgr;
5866 test_region_model_context ctxt;
5867
5868 /* model0: i: 0. */
5869 region_model model0 (&mgr);
5870 model0.set_value (i, int_0, &ctxt);
5871
5872 /* model1: i: 1. */
5873 region_model model1 (&mgr);
5874 model1.set_value (i, int_1, &ctxt);
5875
5876 /* Should merge "i" to a widened value. */
5877 region_model model2 (&mgr);
5878 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
5879 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
5880 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
5881 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
5882 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
5883
5884 /* Add constraint: i < 256 */
5885 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
5886 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
5887 tristate (tristate::TS_TRUE));
5888 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
5889 tristate (tristate::TS_TRUE));
5890
5891 /* Try merging with the initial state. */
5892 region_model model3 (&mgr);
5893 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
5894 /* Merging the merged value with the initial value should be idempotent,
5895 so that the analysis converges. */
5896 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
5897 /* Merger of 0 and a widening value with constraint < CST
5898 should retain the constraint, even though it was implicit
5899 for the 0 case. */
5900 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
5901 tristate (tristate::TS_TRUE));
5902 /* ...and we should have equality: the analysis should have converged. */
5903 ASSERT_EQ (model3, model2);
5904
5905 /* "i_23 = i_11 + 1;" */
5906 region_model model4 (model3);
5907 ASSERT_EQ (model4, model2);
5908 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
5909 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
5910 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
5911
5912 /* Try merging with the "i: 1" state. */
5913 region_model model5 (&mgr);
5914 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
5915 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
5916 ASSERT_EQ (model5, model4);
5917
5918 /* "i_11 = PHI();" merge with state at phi above.
5919 For i, we should have a merger of WIDENING with WIDENING + 1,
5920 and this should be WIDENING again. */
5921 region_model model6 (&mgr);
5922 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
5923 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
5924 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
5925
5926 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
5927}
5928
6969ac30
DM
5929/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
5930 all cast pointers to that region are also known to be non-NULL. */
5931
5932static void
5933test_malloc_constraints ()
5934{
808f4dfe
DM
5935 region_model_manager mgr;
5936 region_model model (&mgr);
6969ac30
DM
5937 tree p = build_global_decl ("p", ptr_type_node);
5938 tree char_star = build_pointer_type (char_type_node);
5939 tree q = build_global_decl ("q", char_star);
5940 tree null_ptr = build_int_cst (ptr_type_node, 0);
5941
808f4dfe 5942 const svalue *size_in_bytes
9a2c9579 5943 = mgr.get_or_create_unknown_svalue (size_type_node);
b9365b93 5944 const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
808f4dfe
DM
5945 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
5946 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6969ac30
DM
5947 model.set_value (q, p, NULL);
5948
6969ac30
DM
5949 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
5950 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
5951 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
5952 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
5953
5954 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
5955
6969ac30
DM
5956 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
5957 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
5958 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
5959 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
5960}
5961
808f4dfe
DM
5962/* Smoketest of getting and setting the value of a variable. */
5963
5964static void
5965test_var ()
5966{
5967 /* "int i;" */
5968 tree i = build_global_decl ("i", integer_type_node);
5969
5970 tree int_17 = build_int_cst (integer_type_node, 17);
5971 tree int_m3 = build_int_cst (integer_type_node, -3);
5972
5973 region_model_manager mgr;
5974 region_model model (&mgr);
5975
5976 const region *i_reg = model.get_lvalue (i, NULL);
5977 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
5978
5979 /* Reading "i" should give a symbolic "initial value". */
5980 const svalue *sval_init = model.get_rvalue (i, NULL);
5981 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
5982 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
5983 /* ..and doing it again should give the same "initial value". */
5984 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
5985
5986 /* "i = 17;". */
5987 model.set_value (i, int_17, NULL);
5988 ASSERT_EQ (model.get_rvalue (i, NULL),
5989 model.get_rvalue (int_17, NULL));
5990
5991 /* "i = -3;". */
5992 model.set_value (i, int_m3, NULL);
5993 ASSERT_EQ (model.get_rvalue (i, NULL),
5994 model.get_rvalue (int_m3, NULL));
5995
5996 /* Verify get_offset for "i". */
5997 {
5998 region_offset offset = i_reg->get_offset ();
5999 ASSERT_EQ (offset.get_base_region (), i_reg);
6000 ASSERT_EQ (offset.get_bit_offset (), 0);
6001 }
6002}
6003
6004static void
6005test_array_2 ()
6006{
6007 /* "int arr[10];" */
6008 tree tlen = size_int (10);
6009 tree arr_type
6010 = build_array_type (integer_type_node, build_index_type (tlen));
6011 tree arr = build_global_decl ("arr", arr_type);
6012
6013 /* "int i;" */
6014 tree i = build_global_decl ("i", integer_type_node);
6015
6016 tree int_0 = build_int_cst (integer_type_node, 0);
6017 tree int_1 = build_int_cst (integer_type_node, 1);
6018
6019 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
6020 arr, int_0, NULL_TREE, NULL_TREE);
6021 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
6022 arr, int_1, NULL_TREE, NULL_TREE);
6023 tree arr_i = build4 (ARRAY_REF, integer_type_node,
6024 arr, i, NULL_TREE, NULL_TREE);
6025
6026 tree int_17 = build_int_cst (integer_type_node, 17);
6027 tree int_42 = build_int_cst (integer_type_node, 42);
6028 tree int_m3 = build_int_cst (integer_type_node, -3);
6029
6030 region_model_manager mgr;
6031 region_model model (&mgr);
6032 /* "arr[0] = 17;". */
6033 model.set_value (arr_0, int_17, NULL);
6034 /* "arr[1] = -3;". */
6035 model.set_value (arr_1, int_m3, NULL);
6036
6037 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
6038 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
6039
6040 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
6041 model.set_value (arr_1, int_42, NULL);
6042 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
6043
6044 /* Verify get_offset for "arr[0]". */
6045 {
6046 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
6047 region_offset offset = arr_0_reg->get_offset ();
6048 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
6049 ASSERT_EQ (offset.get_bit_offset (), 0);
6050 }
6051
6052 /* Verify get_offset for "arr[1]". */
6053 {
6054 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
6055 region_offset offset = arr_1_reg->get_offset ();
6056 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
6057 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6058 }
6059
6060 /* "arr[i] = i;" - this should remove the earlier bindings. */
6061 model.set_value (arr_i, i, NULL);
6062 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
6063 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
6064
6065 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
6066 model.set_value (arr_0, int_17, NULL);
6067 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
6068 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
6069}
6070
6071/* Smoketest of dereferencing a pointer via MEM_REF. */
6072
6073static void
6074test_mem_ref ()
6075{
6076 /*
6077 x = 17;
6078 p = &x;
6079 *p;
6080 */
6081 tree x = build_global_decl ("x", integer_type_node);
6082 tree int_star = build_pointer_type (integer_type_node);
6083 tree p = build_global_decl ("p", int_star);
6084
6085 tree int_17 = build_int_cst (integer_type_node, 17);
6086 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
6087 tree offset_0 = build_int_cst (integer_type_node, 0);
6088 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
6089
6090 region_model_manager mgr;
6091 region_model model (&mgr);
6092
6093 /* "x = 17;". */
6094 model.set_value (x, int_17, NULL);
6095
6096 /* "p = &x;". */
6097 model.set_value (p, addr_of_x, NULL);
6098
6099 const svalue *sval = model.get_rvalue (star_p, NULL);
6100 ASSERT_EQ (sval->maybe_get_constant (), int_17);
6101}
6102
6103/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
6104 Analogous to this code:
6105 void test_6 (int a[10])
6106 {
6107 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
6108 a[3] = 42;
6109 __analyzer_eval (a[3] == 42); [should be TRUE]
6110 }
6111 from data-model-1.c, which looks like this at the gimple level:
6112 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
6113 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
6114 int _2 = *_1; # MEM_REF
6115 _Bool _3 = _2 == 42;
6116 int _4 = (int) _3;
6117 __analyzer_eval (_4);
6118
6119 # a[3] = 42;
6120 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
6121 *_5 = 42; # MEM_REF
6122
6123 # __analyzer_eval (a[3] == 42); [should be TRUE]
6124 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
6125 int _7 = *_6; # MEM_REF
6126 _Bool _8 = _7 == 42;
6127 int _9 = (int) _8;
6128 __analyzer_eval (_9); */
6129
6130static void
6131test_POINTER_PLUS_EXPR_then_MEM_REF ()
6132{
6133 tree int_star = build_pointer_type (integer_type_node);
6134 tree a = build_global_decl ("a", int_star);
6135 tree offset_12 = build_int_cst (size_type_node, 12);
6136 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
6137 tree offset_0 = build_int_cst (integer_type_node, 0);
6138 tree mem_ref = build2 (MEM_REF, integer_type_node,
6139 pointer_plus_expr, offset_0);
6140 region_model_manager mgr;
6141 region_model m (&mgr);
6142
6143 tree int_42 = build_int_cst (integer_type_node, 42);
6144 m.set_value (mem_ref, int_42, NULL);
6145 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
6146}
6147
6148/* Verify that malloc works. */
6149
6150static void
6151test_malloc ()
6152{
6153 tree int_star = build_pointer_type (integer_type_node);
6154 tree p = build_global_decl ("p", int_star);
6155 tree n = build_global_decl ("n", integer_type_node);
6156 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
6157 n, build_int_cst (size_type_node, 4));
6158
6159 region_model_manager mgr;
6160 test_region_model_context ctxt;
6161 region_model model (&mgr);
6162
6163 /* "p = malloc (n * 4);". */
6164 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 6165 const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe
DM
6166 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
6167 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 6168 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
6169}
6170
6171/* Verify that alloca works. */
6172
6173static void
6174test_alloca ()
6175{
6176 auto_vec <tree> param_types;
6177 tree fndecl = make_fndecl (integer_type_node,
6178 "test_fn",
6179 param_types);
6180 allocate_struct_function (fndecl, true);
6181
6182
6183 tree int_star = build_pointer_type (integer_type_node);
6184 tree p = build_global_decl ("p", int_star);
6185 tree n = build_global_decl ("n", integer_type_node);
6186 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
6187 n, build_int_cst (size_type_node, 4));
6188
6189 region_model_manager mgr;
6190 test_region_model_context ctxt;
6191 region_model model (&mgr);
6192
6193 /* Push stack frame. */
6194 const region *frame_reg
6195 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
6196 NULL, &ctxt);
6197 /* "p = alloca (n * 4);". */
6198 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 6199 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
808f4dfe
DM
6200 ASSERT_EQ (reg->get_parent_region (), frame_reg);
6201 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
6202 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 6203 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
6204
6205 /* Verify that the pointers to the alloca region are replaced by
6206 poisoned values when the frame is popped. */
6207 model.pop_frame (NULL, NULL, &ctxt);
33255ad3 6208 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
808f4dfe
DM
6209}
6210
71fc4655
DM
6211/* Verify that svalue::involves_p works. */
6212
6213static void
6214test_involves_p ()
6215{
6216 region_model_manager mgr;
6217 tree int_star = build_pointer_type (integer_type_node);
6218 tree p = build_global_decl ("p", int_star);
6219 tree q = build_global_decl ("q", int_star);
6220
6221 test_region_model_context ctxt;
6222 region_model model (&mgr);
6223 const svalue *p_init = model.get_rvalue (p, &ctxt);
6224 const svalue *q_init = model.get_rvalue (q, &ctxt);
6225
6226 ASSERT_TRUE (p_init->involves_p (p_init));
6227 ASSERT_FALSE (p_init->involves_p (q_init));
6228
6229 const region *star_p_reg = mgr.get_symbolic_region (p_init);
6230 const region *star_q_reg = mgr.get_symbolic_region (q_init);
6231
6232 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
6233 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
6234
6235 ASSERT_TRUE (init_star_p->involves_p (p_init));
6236 ASSERT_FALSE (p_init->involves_p (init_star_p));
6237 ASSERT_FALSE (init_star_p->involves_p (q_init));
6238 ASSERT_TRUE (init_star_q->involves_p (q_init));
6239 ASSERT_FALSE (init_star_q->involves_p (p_init));
6240}
6241
757bf1df
DM
6242/* Run all of the selftests within this file. */
6243
6244void
6245analyzer_region_model_cc_tests ()
6246{
8c08c983 6247 test_tree_cmp_on_constants ();
757bf1df 6248 test_dump ();
808f4dfe
DM
6249 test_struct ();
6250 test_array_1 ();
90f7c300 6251 test_get_representative_tree ();
757bf1df 6252 test_unique_constants ();
808f4dfe
DM
6253 test_unique_unknowns ();
6254 test_initial_svalue_folding ();
6255 test_unaryop_svalue_folding ();
6256 test_binop_svalue_folding ();
6257 test_sub_svalue_folding ();
6258 test_descendent_of_p ();
391512ad 6259 test_bit_range_regions ();
757bf1df 6260 test_assignment ();
a96f1c38 6261 test_compound_assignment ();
757bf1df
DM
6262 test_stack_frames ();
6263 test_get_representative_path_var ();
808f4dfe 6264 test_equality_1 ();
757bf1df
DM
6265 test_canonicalization_2 ();
6266 test_canonicalization_3 ();
8c08c983 6267 test_canonicalization_4 ();
757bf1df
DM
6268 test_state_merging ();
6269 test_constraint_merging ();
808f4dfe
DM
6270 test_widening_constraints ();
6271 test_iteration_1 ();
6969ac30 6272 test_malloc_constraints ();
808f4dfe
DM
6273 test_var ();
6274 test_array_2 ();
6275 test_mem_ref ();
6276 test_POINTER_PLUS_EXPR_then_MEM_REF ();
6277 test_malloc ();
6278 test_alloca ();
71fc4655 6279 test_involves_p ();
757bf1df
DM
6280}
6281
6282} // namespace selftest
6283
6284#endif /* CHECKING_P */
6285
75038aa6
DM
6286} // namespace ana
6287
757bf1df 6288#endif /* #if ENABLE_ANALYZER */