]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
analyzer: fix duplicates in check_for_tainted_size_arg
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df 1/* Classes for modeling the state of memory.
7adcbafe 2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
757bf1df
DM
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
7892ff37 29#include "diagnostic-core.h"
757bf1df
DM
30#include "graphviz.h"
31#include "options.h"
32#include "cgraph.h"
33#include "tree-dfa.h"
34#include "stringpool.h"
35#include "convert.h"
36#include "target.h"
37#include "fold-const.h"
38#include "tree-pretty-print.h"
39#include "diagnostic-color.h"
40#include "diagnostic-metadata.h"
757bf1df 41#include "tristate.h"
ef7827b0 42#include "bitmap.h"
757bf1df
DM
43#include "selftest.h"
44#include "function.h"
809192e7 45#include "json.h"
757bf1df
DM
46#include "analyzer/analyzer.h"
47#include "analyzer/analyzer-logging.h"
48#include "ordered-hash-map.h"
49#include "options.h"
50#include "cgraph.h"
51#include "cfg.h"
52#include "digraph.h"
53#include "analyzer/supergraph.h"
54#include "sbitmap.h"
808f4dfe
DM
55#include "analyzer/call-string.h"
56#include "analyzer/program-point.h"
57#include "analyzer/store.h"
757bf1df
DM
58#include "analyzer/region-model.h"
59#include "analyzer/constraint-manager.h"
60#include "diagnostic-event-id.h"
61#include "analyzer/sm.h"
62#include "diagnostic-event-id.h"
63#include "analyzer/sm.h"
64#include "analyzer/pending-diagnostic.h"
808f4dfe 65#include "analyzer/region-model-reachability.h"
757bf1df 66#include "analyzer/analyzer-selftests.h"
f573d351 67#include "analyzer/program-state.h"
884d9141 68#include "stor-layout.h"
c7e276b8 69#include "attribs.h"
9a2c9579 70#include "tree-object-size.h"
1e2fe671
DM
71#include "gimple-ssa.h"
72#include "tree-phinodes.h"
73#include "tree-ssa-operands.h"
74#include "ssa-iterators.h"
5fbcbcaf 75#include "calls.h"
757bf1df
DM
76
77#if ENABLE_ANALYZER
78
75038aa6
DM
79namespace ana {
80
757bf1df
DM
81/* Dump T to PP in language-independent form, for debugging/logging/dumping
82 purposes. */
83
757bf1df 84void
808f4dfe 85dump_tree (pretty_printer *pp, tree t)
757bf1df 86{
808f4dfe 87 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
757bf1df
DM
88}
89
808f4dfe
DM
90/* Dump T to PP in language-independent form in quotes, for
91 debugging/logging/dumping purposes. */
757bf1df
DM
92
93void
808f4dfe 94dump_quoted_tree (pretty_printer *pp, tree t)
757bf1df 95{
808f4dfe
DM
96 pp_begin_quote (pp, pp_show_color (pp));
97 dump_tree (pp, t);
98 pp_end_quote (pp, pp_show_color (pp));
757bf1df
DM
99}
100
808f4dfe
DM
101/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
102 calls within other pp_printf calls.
757bf1df 103
808f4dfe
DM
104 default_tree_printer handles 'T' and some other codes by calling
105 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
106 dump_generic_node calls pp_printf in various places, leading to
107 garbled output.
757bf1df 108
808f4dfe
DM
109 Ideally pp_printf could be made to be reentrant, but in the meantime
110 this function provides a workaround. */
6969ac30
DM
111
112void
808f4dfe 113print_quoted_type (pretty_printer *pp, tree t)
6969ac30 114{
808f4dfe
DM
115 pp_begin_quote (pp, pp_show_color (pp));
116 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
117 pp_end_quote (pp, pp_show_color (pp));
6969ac30
DM
118}
119
d726a57b
DM
120/* class region_to_value_map. */
121
122/* Assignment operator for region_to_value_map. */
123
124region_to_value_map &
125region_to_value_map::operator= (const region_to_value_map &other)
126{
127 m_hash_map.empty ();
128 for (auto iter : other.m_hash_map)
129 {
130 const region *reg = iter.first;
131 const svalue *sval = iter.second;
132 m_hash_map.put (reg, sval);
133 }
134 return *this;
135}
136
137/* Equality operator for region_to_value_map. */
138
139bool
140region_to_value_map::operator== (const region_to_value_map &other) const
141{
142 if (m_hash_map.elements () != other.m_hash_map.elements ())
143 return false;
144
145 for (auto iter : *this)
146 {
147 const region *reg = iter.first;
148 const svalue *sval = iter.second;
149 const svalue * const *other_slot = other.get (reg);
150 if (other_slot == NULL)
151 return false;
152 if (sval != *other_slot)
153 return false;
154 }
155
156 return true;
157}
158
159/* Dump this object to PP. */
160
161void
162region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
163 bool multiline) const
164{
165 auto_vec<const region *> regs;
166 for (iterator iter = begin (); iter != end (); ++iter)
167 regs.safe_push ((*iter).first);
168 regs.qsort (region::cmp_ptr_ptr);
169 if (multiline)
170 pp_newline (pp);
171 else
172 pp_string (pp, " {");
173 unsigned i;
174 const region *reg;
175 FOR_EACH_VEC_ELT (regs, i, reg)
176 {
177 if (multiline)
178 pp_string (pp, " ");
179 else if (i > 0)
180 pp_string (pp, ", ");
181 reg->dump_to_pp (pp, simple);
182 pp_string (pp, ": ");
183 const svalue *sval = *get (reg);
184 sval->dump_to_pp (pp, true);
185 if (multiline)
186 pp_newline (pp);
187 }
188 if (!multiline)
189 pp_string (pp, "}");
190}
191
/* Dump this object to stderr, one entry per line ("multiline" form).
   SIMPLE controls how svalues and regions are printed.
   Colorization follows the global diagnostic context.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
205
206
207/* Attempt to merge THIS with OTHER, writing the result
208 to OUT.
209
210 For now, write (region, value) mappings that are in common between THIS
211 and OTHER to OUT, effectively taking the intersection, rather than
212 rejecting differences. */
213
214bool
215region_to_value_map::can_merge_with_p (const region_to_value_map &other,
216 region_to_value_map *out) const
217{
218 for (auto iter : *this)
219 {
220 const region *iter_reg = iter.first;
221 const svalue *iter_sval = iter.second;
222 const svalue * const * other_slot = other.get (iter_reg);
223 if (other_slot)
224 if (iter_sval == *other_slot)
225 out->put (iter_reg, iter_sval);
226 }
227 return true;
228}
229
33255ad3
DM
230/* Purge any state involving SVAL. */
231
232void
233region_to_value_map::purge_state_involving (const svalue *sval)
234{
235 auto_vec<const region *> to_purge;
236 for (auto iter : *this)
237 {
238 const region *iter_reg = iter.first;
239 const svalue *iter_sval = iter.second;
240 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
241 to_purge.safe_push (iter_reg);
242 }
243 for (auto iter : to_purge)
244 m_hash_map.remove (iter);
245}
246
757bf1df
DM
247/* class region_model. */
248
808f4dfe 249/* Ctor for region_model: construct an "empty" model. */
757bf1df 250
808f4dfe 251region_model::region_model (region_model_manager *mgr)
9a2c9579
DM
252: m_mgr (mgr), m_store (), m_current_frame (NULL),
253 m_dynamic_extents ()
757bf1df 254{
808f4dfe 255 m_constraints = new constraint_manager (mgr);
757bf1df
DM
256}
257
/* region_model's copy ctor.
   The constraint manager is deep-copied since each model owns its
   own instance; the other members are copied directly.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
267
/* region_model's dtor.  The constraint manager is the only member
   held via an owning pointer; everything else cleans itself up.  */

region_model::~region_model ()
{
  delete m_constraints;
}
274
275/* region_model's assignment operator. */
276
277region_model &
278region_model::operator= (const region_model &other)
279{
808f4dfe
DM
280 /* m_mgr is const. */
281 gcc_assert (m_mgr == other.m_mgr);
757bf1df 282
808f4dfe 283 m_store = other.m_store;
757bf1df
DM
284
285 delete m_constraints;
808f4dfe 286 m_constraints = new constraint_manager (*other.m_constraints);
757bf1df 287
808f4dfe 288 m_current_frame = other.m_current_frame;
757bf1df 289
9a2c9579
DM
290 m_dynamic_extents = other.m_dynamic_extents;
291
757bf1df
DM
292 return *this;
293}
294
295/* Equality operator for region_model.
296
808f4dfe
DM
297 Amongst other things this directly compares the stores and the constraint
298 managers, so for this to be meaningful both this and OTHER should
757bf1df
DM
299 have been canonicalized. */
300
301bool
302region_model::operator== (const region_model &other) const
303{
808f4dfe
DM
304 /* We can only compare instances that use the same manager. */
305 gcc_assert (m_mgr == other.m_mgr);
757bf1df 306
808f4dfe 307 if (m_store != other.m_store)
757bf1df
DM
308 return false;
309
310 if (*m_constraints != *other.m_constraints)
311 return false;
312
808f4dfe
DM
313 if (m_current_frame != other.m_current_frame)
314 return false;
757bf1df 315
9a2c9579
DM
316 if (m_dynamic_extents != other.m_dynamic_extents)
317 return false;
318
757bf1df
DM
319 gcc_checking_assert (hash () == other.hash ());
320
321 return true;
322}
323
324/* Generate a hash value for this region_model. */
325
326hashval_t
808f4dfe
DM
327region_model::hash () const
328{
329 hashval_t result = m_store.hash ();
330 result ^= m_constraints->hash ();
331 return result;
757bf1df
DM
332}
333
808f4dfe
DM
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed;
   MULTILINE selects between one-item-per-line and single-line
   "{...}" output.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack, walking from the innermost frame outwards.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, " ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
757bf1df 388
808f4dfe 389/* Dump a representation of this model to FILE. */
757bf1df 390
808f4dfe
DM
391void
392region_model::dump (FILE *fp, bool simple, bool multiline) const
393{
394 pretty_printer pp;
395 pp_format_decoder (&pp) = default_tree_printer;
396 pp_show_color (&pp) = pp_show_color (global_dc->printer);
397 pp.buffer->stream = fp;
398 dump_to_pp (&pp, simple, multiline);
399 pp_newline (&pp);
400 pp_flush (&pp);
757bf1df
DM
401}
402
808f4dfe 403/* Dump a multiline representation of this model to stderr. */
757bf1df 404
808f4dfe
DM
405DEBUG_FUNCTION void
406region_model::dump (bool simple) const
407{
408 dump (stderr, simple, true);
409}
757bf1df 410
808f4dfe 411/* Dump a multiline representation of this model to stderr. */
757bf1df 412
808f4dfe
DM
413DEBUG_FUNCTION void
414region_model::debug () const
757bf1df 415{
808f4dfe 416 dump (true);
757bf1df
DM
417}
418
e61ffa20
DM
/* Assert that this object is valid.
   Currently this delegates to the store's own validation.  */

void
region_model::validate () const
{
  m_store.validate ();
}
426
808f4dfe
DM
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
436
437/* Return true if this region_model is in canonical form. */
438
439bool
440region_model::canonicalized_p () const
441{
442 region_model copy (*this);
808f4dfe 443 copy.canonicalize ();
757bf1df
DM
444 return *this == copy;
445}
446
808f4dfe
DM
/* See the comment for store::loop_replay_fixup.
   Forwards to the store, passing DST_STATE's store and our manager.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
454
757bf1df
DM
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values (uninitialized, freed, or within a popped stack
   frame).  The poison kind selects the warning option, wording, and
   CWE classification.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR is the tree used when describing the value to the user;
     SRC_REGION, if non-NULL, identifies where the poisoned value
     came from (used for "region created here" notes).  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }

  /* Used by callers to distinguish uninit-use diagnostics from the
     other poison kinds.  */
  bool use_of_uninit_p () const FINAL OVERRIDE
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  /* Deduplication: two instances are the same diagnostic iff all
     three fields match.  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  /* Emit the warning appropriate to the poison kind.  Returns true
     if a diagnostic was actually issued.  */
  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_of_uninitialized_value,
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_after_free,
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc,
	     OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  /* Describe the final event in the diagnostic path, with wording
     matching that of emit above.  */
  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  /* Mark the source region (if known) as interesting, so its
     creation gets mentioned in the diagnostic path.  */
  void mark_interesting_stuff (interesting_t *interest) FINAL OVERRIDE
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  /* The expression to print in the warning text.  */
  tree m_expr;
  /* Which kind of poison was encountered.  */
  enum poison_kind m_pkind;
  /* Where the poisoned value originated, or NULL if unknown.  */
  const region *m_src_region;
};
551
5e00ad3f
DM
552/* A subclass of pending_diagnostic for complaining about shifts
553 by negative counts. */
554
555class shift_count_negative_diagnostic
556: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
557{
558public:
559 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
560 : m_assign (assign), m_count_cst (count_cst)
561 {}
562
563 const char *get_kind () const FINAL OVERRIDE
564 {
565 return "shift_count_negative_diagnostic";
566 }
567
568 bool operator== (const shift_count_negative_diagnostic &other) const
569 {
570 return (m_assign == other.m_assign
571 && same_tree_p (m_count_cst, other.m_count_cst));
572 }
573
574 bool emit (rich_location *rich_loc) FINAL OVERRIDE
575 {
576 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_negative,
577 "shift by negative count (%qE)", m_count_cst);
578 }
579
580 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
581 {
582 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
583 }
584
585private:
586 const gassign *m_assign;
587 tree m_count_cst;
588};
589
590/* A subclass of pending_diagnostic for complaining about shifts
591 by counts >= the width of the operand type. */
592
593class shift_count_overflow_diagnostic
594: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
595{
596public:
597 shift_count_overflow_diagnostic (const gassign *assign,
598 int operand_precision,
599 tree count_cst)
600 : m_assign (assign), m_operand_precision (operand_precision),
601 m_count_cst (count_cst)
602 {}
603
604 const char *get_kind () const FINAL OVERRIDE
605 {
606 return "shift_count_overflow_diagnostic";
607 }
608
609 bool operator== (const shift_count_overflow_diagnostic &other) const
610 {
611 return (m_assign == other.m_assign
612 && m_operand_precision == other.m_operand_precision
613 && same_tree_p (m_count_cst, other.m_count_cst));
614 }
615
616 bool emit (rich_location *rich_loc) FINAL OVERRIDE
617 {
618 return warning_at (rich_loc, OPT_Wanalyzer_shift_count_overflow,
619 "shift by count (%qE) >= precision of type (%qi)",
620 m_count_cst, m_operand_precision);
621 }
622
623 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
624 {
625 return ev.formatted_print ("shift by count %qE here", m_count_cst);
626 }
627
628private:
629 const gassign *m_assign;
630 int m_operand_precision;
631 tree m_count_cst;
632};
633
808f4dfe
DM
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.
   CTXT is used when getting rvalues for the operands (and thus for
   reporting any diagnostics, e.g. shift-count warnings, that arise
   while doing so).  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Not a tree code we can model this way; the caller must
	 handle it (e.g. by falling back to an unknown svalue).  */
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	/* Comparisons.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn (new shift_count_negative_diagnostic
				  (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn (new shift_count_overflow_diagnostic
				  (assign, TYPE_PRECISION (TREE_TYPE (rhs1)),
				   rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
836
1e2fe671
DM
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR_IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       region_model_context *ctxt)
{
  gcc_assert (ctxt);
  const gimple *curr_stmt = ctxt->get_stmt ();
  if (curr_stmt == NULL)
    return false;

  /* We must have an assignment to a temporary of _Bool type.  */
  const gassign *assign_stmt = dyn_cast <const gassign *> (curr_stmt);
  if (!assign_stmt)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  /* An SSA name with an underlying var would be a user variable,
     not a compiler-generated temporary.  */
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
938
33255ad3
DM
/* Check for SVAL being poisoned, adding a warning to CTXT.
   EXPR is the tree to describe the value in any diagnostic.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   No-op (returning SVAL unchanged) if CTXT is NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      /* Special case to avoid certain false positives
	 (see within_short_circuited_stmt_p).  */
      if (pkind == POISON_KIND_UNINIT
	  && within_short_circuited_stmt_p (this, ctxt))
	return sval;

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind,
						     src_region)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
990
00e7d024
DM
991/* Attempt to get a region for describing EXPR, the source of region of
992 a poisoned_svalue for use in a poisoned_value_diagnostic.
993 Return NULL if there is no good region to use. */
994
995const region *
996region_model::get_region_for_poisoned_expr (tree expr) const
997{
998 if (TREE_CODE (expr) == SSA_NAME)
999 {
1000 tree decl = SSA_NAME_VAR (expr);
1001 if (decl && DECL_P (decl))
1002 expr = decl;
1003 else
1004 return NULL;
1005 }
1006 return get_lvalue (expr, NULL);
1007}
1008
808f4dfe
DM
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      /* Warn (via CTXT) if SVAL is poisoned; the store is still
	 updated with the computed SVAL.  */
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Flip the "if (0)" to "if (1)" to get a sorry () for each
	   tree code that isn't explicitly modelled; by default we
	   silently fall back to an unknown value.  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		/* Elements without an explicit index use their position.  */
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
1091
33255ad3
DM
/* A pending_diagnostic subclass for implementing "__analyzer_dump_path".  */

class dump_path_diagnostic
  : public pending_diagnostic_subclass<dump_path_diagnostic>
{
public:
  /* Emit a simple "path" note at RICHLOC; always reports.  */
  bool emit (rich_location *richloc) FINAL OVERRIDE
  {
    inform (richloc, "path");
    return true;
  }

  const char *get_kind () const FINAL OVERRIDE { return "dump_path_diagnostic"; }

  /* All instances are interchangeable, so compare equal
     (used for deduplication by the diagnostic machinery).  */
  bool operator== (const dump_path_diagnostic &) const
  {
    return true;
  }
};
1111
/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_TERMINATE_PATH if the path should be terminated.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_terminate_path,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);

	/* Debugging/test support: the "__analyzer_*" functions below are
	   special-cased before normal call handling.  */
	if (is_special_named_call_p (call, "__analyzer_describe", 2))
	  impl_call_analyzer_describe (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
	  impl_call_analyzer_dump_capacity (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
	  impl_call_analyzer_dump_escaped (call);
	else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
	  {
	    /* Handle the builtin "__analyzer_dump_path" by queuing a
	       diagnostic at this exploded_node.  */
	    ctxt->warn (new dump_path_diagnostic ());
	  }
	else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
					 0))
	  {
	    /* Handle the builtin "__analyzer_dump_region_model" by dumping
	       the region model's state to stderr.  */
	    dump (false);
	  }
	else if (is_special_named_call_p (call, "__analyzer_eval", 1))
	  impl_call_analyzer_eval (call, ctxt);
	else if (is_special_named_call_p (call, "__analyzer_break", 0))
	  {
	    /* Handle the builtin "__analyzer_break" by triggering a
	       breakpoint.  */
	    /* TODO: is there a good cross-platform way to do this?  */
	    raise (SIGINT);
	  }
	else if (is_special_named_call_p (call,
					  "__analyzer_dump_exploded_nodes",
					  1))
	  {
	    /* This is handled elsewhere.  */
	  }
	else
	  /* Ordinary calls: delegate; on_call_pre reports whether the
	     call has unknown side effects.  */
	  *out_unknown_side_effects = on_call_pre (call, ctxt,
						   out_terminate_path);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}
1199
9ff3e236
DM
1200/* Ensure that all arguments at the call described by CD are checked
1201 for poisoned values, by calling get_rvalue on each argument. */
1202
1203void
1204region_model::check_call_args (const call_details &cd) const
1205{
1206 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1207 cd.get_arg_svalue (arg_idx);
1208}
1209
aee1adf2
DM
1210/* Return true if CD is known to be a call to a function with
1211 __attribute__((const)). */
1212
1213static bool
1214const_fn_p (const call_details &cd)
1215{
1216 tree fndecl = cd.get_fndecl_for_call ();
1217 if (!fndecl)
1218 return false;
1219 gcc_assert (DECL_P (fndecl));
1220 return TREE_READONLY (fndecl);
1221}
1222
1223/* If this CD is known to be a call to a function with
1224 __attribute__((const)), attempt to get a const_fn_result_svalue
1225 based on the arguments, or return NULL otherwise. */
1226
1227static const svalue *
1228maybe_get_const_fn_result (const call_details &cd)
1229{
1230 if (!const_fn_p (cd))
1231 return NULL;
1232
1233 unsigned num_args = cd.num_args ();
1234 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1235 /* Too many arguments. */
1236 return NULL;
1237
1238 auto_vec<const svalue *> inputs (num_args);
1239 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1240 {
1241 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1242 if (!arg_sval->can_have_associated_state_p ())
1243 return NULL;
1244 inputs.quick_push (arg_sval);
1245 }
1246
1247 region_model_manager *mgr = cd.get_manager ();
1248 const svalue *sval
1249 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1250 cd.get_fndecl_for_call (),
1251 inputs);
1252 return sval;
1253}
1254
757bf1df
DM
1255/* Update this model for the CALL stmt, using CTXT to report any
1256 diagnostics - the first half.
1257
1258 Updates to the region_model that should be made *before* sm-states
1259 are updated are done here; other updates to the region_model are done
ef7827b0 1260 in region_model::on_call_post.
757bf1df 1261
ef7827b0
DM
1262 Return true if the function call has unknown side effects (it wasn't
1263 recognized and we don't have a body for it, or are unable to tell which
5ee4ba03
DM
1264 fndecl it is).
1265
1266 Write true to *OUT_TERMINATE_PATH if this execution path should be
1267 terminated (e.g. the function call terminates the process). */
ef7827b0
DM
1268
1269bool
5ee4ba03
DM
1270region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
1271 bool *out_terminate_path)
757bf1df 1272{
48e8a7a6
DM
1273 call_details cd (call, this, ctxt);
1274
ef7827b0
DM
1275 bool unknown_side_effects = false;
1276
9b4eee5f
DM
1277 /* Special-case for IFN_DEFERRED_INIT.
1278 We want to report uninitialized variables with -fanalyzer (treating
1279 -ftrivial-auto-var-init= as purely a mitigation feature).
1280 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
1281 lhs of the call, so that it is still uninitialized from the point of
1282 view of the analyzer. */
1283 if (gimple_call_internal_p (call)
1284 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1285 return false;
1286
33255ad3
DM
1287 /* Some of the cases below update the lhs of the call based on the
1288 return value, but not all. Provide a default value, which may
1289 get overwritten below. */
1290 if (tree lhs = gimple_call_lhs (call))
1291 {
1292 const region *lhs_region = get_lvalue (lhs, ctxt);
aee1adf2
DM
1293 const svalue *sval = maybe_get_const_fn_result (cd);
1294 if (!sval)
1295 {
1296 /* For the common case of functions without __attribute__((const)),
1297 use a conjured value, and purge any prior state involving that
1298 value (in case this is in a loop). */
1299 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
1300 lhs_region);
1301 purge_state_involving (sval, ctxt);
1302 }
3a1d168e 1303 set_value (lhs_region, sval, ctxt);
33255ad3
DM
1304 }
1305
48e8a7a6 1306 if (gimple_call_internal_p (call))
757bf1df 1307 {
48e8a7a6
DM
1308 switch (gimple_call_internal_fn (call))
1309 {
1310 default:
1311 break;
1312 case IFN_BUILTIN_EXPECT:
b5081130
DM
1313 impl_call_builtin_expect (cd);
1314 return false;
37eb3ef4
DM
1315 case IFN_UBSAN_BOUNDS:
1316 return false;
48e8a7a6
DM
1317 }
1318 }
808f4dfe 1319
48e8a7a6
DM
1320 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1321 {
808f4dfe
DM
1322 /* The various impl_call_* member functions are implemented
1323 in region-model-impl-calls.cc.
1324 Having them split out into separate functions makes it easier
1325 to put breakpoints on the handling of specific functions. */
5fbcbcaf 1326 int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
ee7bfbe5 1327
47997a32 1328 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
ee7bfbe5
DM
1329 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
1330 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
1331 {
1332 default:
5fbcbcaf 1333 if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
b7028f06 1334 unknown_side_effects = true;
ee7bfbe5
DM
1335 break;
1336 case BUILT_IN_ALLOCA:
1337 case BUILT_IN_ALLOCA_WITH_ALIGN:
b5081130
DM
1338 impl_call_alloca (cd);
1339 return false;
ee7bfbe5 1340 case BUILT_IN_CALLOC:
b5081130
DM
1341 impl_call_calloc (cd);
1342 return false;
ee7bfbe5
DM
1343 case BUILT_IN_EXPECT:
1344 case BUILT_IN_EXPECT_WITH_PROBABILITY:
b5081130
DM
1345 impl_call_builtin_expect (cd);
1346 return false;
ee7bfbe5
DM
1347 case BUILT_IN_FREE:
1348 /* Handle in "on_call_post". */
1349 break;
1350 case BUILT_IN_MALLOC:
b5081130
DM
1351 impl_call_malloc (cd);
1352 return false;
b7028f06
DM
1353 case BUILT_IN_MEMCPY:
1354 case BUILT_IN_MEMCPY_CHK:
1355 impl_call_memcpy (cd);
1356 return false;
ee7bfbe5 1357 case BUILT_IN_MEMSET:
bc62bfb0 1358 case BUILT_IN_MEMSET_CHK:
ee7bfbe5
DM
1359 impl_call_memset (cd);
1360 return false;
1361 break;
a6baafca 1362 case BUILT_IN_REALLOC:
a6baafca 1363 return false;
111fd515
DM
1364 case BUILT_IN_STRCHR:
1365 impl_call_strchr (cd);
1366 return false;
b7028f06
DM
1367 case BUILT_IN_STRCPY:
1368 case BUILT_IN_STRCPY_CHK:
1369 impl_call_strcpy (cd);
1370 return false;
ee7bfbe5 1371 case BUILT_IN_STRLEN:
b5081130
DM
1372 impl_call_strlen (cd);
1373 return false;
b7028f06 1374
37eb3ef4
DM
1375 case BUILT_IN_STACK_SAVE:
1376 case BUILT_IN_STACK_RESTORE:
1377 return false;
1378
b7028f06
DM
1379 /* Stdio builtins. */
1380 case BUILT_IN_FPRINTF:
1381 case BUILT_IN_FPRINTF_UNLOCKED:
1382 case BUILT_IN_PUTC:
1383 case BUILT_IN_PUTC_UNLOCKED:
1384 case BUILT_IN_FPUTC:
1385 case BUILT_IN_FPUTC_UNLOCKED:
1386 case BUILT_IN_FPUTS:
1387 case BUILT_IN_FPUTS_UNLOCKED:
1388 case BUILT_IN_FWRITE:
1389 case BUILT_IN_FWRITE_UNLOCKED:
1390 case BUILT_IN_PRINTF:
1391 case BUILT_IN_PRINTF_UNLOCKED:
1392 case BUILT_IN_PUTCHAR:
1393 case BUILT_IN_PUTCHAR_UNLOCKED:
1394 case BUILT_IN_PUTS:
1395 case BUILT_IN_PUTS_UNLOCKED:
1396 case BUILT_IN_VFPRINTF:
1397 case BUILT_IN_VPRINTF:
1398 /* These stdio builtins have external effects that are out
1399 of scope for the analyzer: we only want to model the effects
1400 on the return value. */
9ff3e236 1401 check_call_args (cd);
b7028f06 1402 break;
ee7bfbe5 1403 }
ee7bfbe5 1404 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
b5081130
DM
1405 {
1406 impl_call_malloc (cd);
1407 return false;
1408 }
808f4dfe 1409 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
b5081130
DM
1410 {
1411 impl_call_calloc (cd);
1412 return false;
1413 }
ee7bfbe5 1414 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
b5081130
DM
1415 {
1416 impl_call_alloca (cd);
1417 return false;
1418 }
a6baafca
DM
1419 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
1420 {
1421 impl_call_realloc (cd);
1422 return false;
1423 }
5ee4ba03
DM
1424 else if (is_named_call_p (callee_fndecl, "error"))
1425 {
1426 if (impl_call_error (cd, 3, out_terminate_path))
1427 return false;
1428 else
1429 unknown_side_effects = true;
1430 }
1431 else if (is_named_call_p (callee_fndecl, "error_at_line"))
1432 {
1433 if (impl_call_error (cd, 5, out_terminate_path))
1434 return false;
1435 else
1436 unknown_side_effects = true;
1437 }
33255ad3
DM
1438 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
1439 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
1440 {
1441 impl_call_fgets (cd);
1442 return false;
1443 }
1444 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
1445 {
1446 impl_call_fread (cd);
1447 return false;
1448 }
e097c9ab
DM
1449 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
1450 {
1451 /* No side-effects (tracking stream state is out-of-scope
1452 for the analyzer). */
1453 }
1e19ecd7
DM
1454 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
1455 && POINTER_TYPE_P (cd.get_arg_type (0)))
e516294a 1456 {
808f4dfe 1457 impl_call_memset (cd);
e516294a
DM
1458 return false;
1459 }
111fd515
DM
1460 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
1461 && POINTER_TYPE_P (cd.get_arg_type (0)))
1462 {
1463 impl_call_strchr (cd);
1464 return false;
1465 }
1e19ecd7
DM
1466 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
1467 && POINTER_TYPE_P (cd.get_arg_type (0)))
757bf1df 1468 {
b5081130
DM
1469 impl_call_strlen (cd);
1470 return false;
757bf1df 1471 }
1690a839 1472 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
b5081130
DM
1473 {
1474 impl_call_operator_new (cd);
1475 return false;
1476 }
1690a839 1477 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
b5081130
DM
1478 {
1479 impl_call_operator_new (cd);
1480 return false;
1481 }
1690a839
DM
1482 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
1483 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
1484 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
1485 {
1486 /* Handle in "on_call_post". */
1487 }
ef7827b0 1488 else if (!fndecl_has_gimple_body_p (callee_fndecl)
5fbcbcaf 1489 && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
808f4dfe 1490 && !fndecl_built_in_p (callee_fndecl))
ef7827b0 1491 unknown_side_effects = true;
757bf1df 1492 }
ef7827b0
DM
1493 else
1494 unknown_side_effects = true;
757bf1df 1495
ef7827b0 1496 return unknown_side_effects;
757bf1df
DM
1497}
1498
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      /* Deallocation-style calls are modeled here (after sm-state
	 updates) so the state machines see the pre-free state.  */
      if (is_named_call_p (callee_fndecl, "free", call, 1))
	{
	  impl_call_free (cd);
	  return;
	}
      if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
	  || is_named_call_p (callee_fndecl, "operator delete", call, 2)
	  || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
	{
	  impl_call_operator_delete (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
      if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	  && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
	  {
	  default:
	    break;
	  case BUILT_IN_REALLOC:
	    impl_call_realloc (cd);
	    return;
	  }
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
1551
33255ad3
DM
/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  /* Svalues that can't have associated state need no purging.  */
  if (!sval->can_have_associated_state_p ())
    return;
  /* Purge from each component of the model in turn.  */
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}
1585
ef7827b0
DM
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      /* Record mutable svalues so they can be treated as maybe-bound
	 roots for later reachability (leak detection).  */
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
ef7827b0 1672
808f4dfe
DM
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Maybe-bound svalues from unknown calls also count as roots.  */
  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      /* Only frame-local regions (locals) are added here;
	 they are added as non-mutable.  */
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
1722
1723/* Update this model for the RETURN_STMT, using CTXT to report any
1724 diagnostics. */
1725
1726void
1727region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1728{
1729 tree callee = get_current_function ()->decl;
1730 tree lhs = DECL_RESULT (callee);
1731 tree rhs = gimple_return_retval (return_stmt);
1732
1733 if (lhs && rhs)
13ad6d9f
DM
1734 {
1735 const svalue *sval = get_rvalue (rhs, ctxt);
1736 const region *ret_reg = get_lvalue (lhs, ctxt);
1737 set_value (ret_reg, sval, ctxt);
1738 }
757bf1df
DM
1739}
1740
342e14ff
DM
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/sigsetjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  /* Dereference the jmp_buf argument to find the region it points to.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
1774
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
1827
/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Get state from OLD_STATE so that all of the phi stmts for a basic block
   are effectively handled simultaneously.  */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  const region_model &old_state,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  /* Evaluate both sides against OLD_STATE, not *this, so that the
     phis of a block don't observe each other's updates.  */
  const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
  const region *dst_reg = old_state.get_lvalue (lhs, ctxt);

  set_value (dst_reg, src_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
1854
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unhandled tree codes get a placeholder region.  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* Operand 1 is the number of bits, operand 2 the first bit's
	   offset; both must be compile-time constants.  */
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	/* *(PTR + OFFSET): dereference PTR, then apply the offset.  */
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	/* Locals live in the frame identified by the path_var's
	   stack depth.  */
	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
1955
1956/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
1957
09bea584
DM
1958static void
1959assert_compat_types (tree src_type, tree dst_type)
1960{
1961 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
1962 {
1963#if CHECKING_P
1964 if (!(useless_type_conversion_p (src_type, dst_type)))
1965 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
1966#endif
1967 }
09bea584 1968}
757bf1df 1969
ea4e3218
DM
1970/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
1971
e66b9f67 1972bool
ea4e3218
DM
1973compat_types_p (tree src_type, tree dst_type)
1974{
1975 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
1976 if (!(useless_type_conversion_p (src_type, dst_type)))
1977 return false;
1978 return true;
1979}
1980
808f4dfe 1981/* Get the region for PV within this region_model,
757bf1df
DM
1982 emitting any diagnostics to CTXT. */
1983
808f4dfe 1984const region *
53cb324c 1985region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
1986{
1987 if (pv.m_tree == NULL_TREE)
808f4dfe 1988 return NULL;
757bf1df 1989
808f4dfe
DM
1990 const region *result_reg = get_lvalue_1 (pv, ctxt);
1991 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
1992 return result_reg;
757bf1df
DM
1993}
1994
/* Get the region for EXPR within this region_model (assuming the most
   recent stack frame if it's a local).
   Convenience overload: wraps EXPR in a path_var at the innermost
   frame's stack depth and delegates to the path_var overload.  */

const region *
region_model::get_lvalue (tree expr, region_model_context *ctxt) const
{
  return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}
2003
/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.
   Dispatches on the tree code of PV's tree; anything unhandled becomes
   an "unknown" svalue of the appropriate type.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      /* Fallback: tree codes we don't model yield "unknown".  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR": get the region for the operand and wrap it in a
	   pointer svalue.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	/* Extract the constant bit range (operand 2 is the start bit,
	   operand 1 the width) from the value of operand 0.  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	/* Lvalue-like nodes: get the region and read its value from
	   the store.  */
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops: build a unaryop svalue over the operand's value.  */
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      /* Constants map directly to constant svalues.  */
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	/* Pointer arithmetic: model as a POINTER_PLUS_EXPR binop.  */
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	/* Field accesses and dereferences: read via the lvalue's region.  */
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	/* Virtual-call expression: evaluate the underlying expression.  */
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}
2114
2115/* Get the value of PV within this region_model,
2116 emitting any diagnostics to CTXT. */
2117
808f4dfe 2118const svalue *
53cb324c 2119region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2120{
2121 if (pv.m_tree == NULL_TREE)
808f4dfe 2122 return NULL;
757bf1df 2123
808f4dfe 2124 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 2125
808f4dfe
DM
2126 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2127
33255ad3
DM
2128 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
2129
808f4dfe 2130 return result_sval;
757bf1df
DM
2131}
2132
/* Get the value of EXPR within this region_model (assuming the most
   recent stack frame if it's a local).
   Convenience overload: wraps EXPR in a path_var at the innermost
   frame's stack depth and delegates to the path_var overload.  */

const svalue *
region_model::get_rvalue (tree expr, region_model_context *ctxt) const
{
  return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}
2141
623bc027
DM
2142/* Return true if this model is on a path with "main" as the entrypoint
2143 (as opposed to one in which we're merely analyzing a subset of the
2144 path through the code). */
2145
2146bool
2147region_model::called_from_main_p () const
2148{
2149 if (!m_current_frame)
2150 return false;
2151 /* Determine if the oldest stack frame in this model is for "main". */
2152 const frame_region *frame0 = get_frame_at_index (0);
2153 gcc_assert (frame0);
2154 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2155}
2156
/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).
   Returns either an "unknown" svalue, the value from the decl's
   initializer, or INIT_VAL(REG).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  Read-only decls can be treated this way regardless of
     the entrypoint, since they can't have been modified.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    {
      /* Attempt to get the initializer value for base_reg.  */
      if (const svalue *base_reg_init
	    = base_reg->get_svalue_for_initializer (m_mgr))
	{
	  if (reg == base_reg)
	    return base_reg_init;
	  else
	    {
	      /* REG is within BASE_REG: bind the initializer into a
		 temporary cluster and extract the sub-value for REG.  */
	      binding_cluster c (base_reg);
	      c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
	      const svalue *sval
		= c.get_any_binding (m_mgr->get_store_manager (), reg);
	      if (sval)
		{
		  if (reg->get_type ())
		    sval = m_mgr->get_or_create_cast (reg->get_type (),
						      sval);
		  return sval;
		}
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
2213
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.
   The lookup order is: constant-pool decls, explicit store bindings,
   STRING_CST special cases, then implicit initial values.  */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* May emit warnings (e.g. taint-related) about this read.  */
  check_region_for_read (reg, ctxt);

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  /* Consult the store for an explicit binding.  */
  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg);
}
2277
808f4dfe
DM
2278/* Return false if REG does not exist, true if it may do.
2279 This is for detecting regions within the stack that don't exist anymore
2280 after frames are popped. */
757bf1df 2281
808f4dfe
DM
2282bool
2283region_model::region_exists_p (const region *reg) const
757bf1df 2284{
808f4dfe
DM
2285 /* If within a stack frame, check that the stack frame is live. */
2286 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 2287 {
808f4dfe
DM
2288 /* Check that the current frame is the enclosing frame, or is called
2289 by it. */
2290 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2291 iter_frame = iter_frame->get_calling_frame ())
2292 if (iter_frame == enclosing_frame)
2293 return true;
2294 return false;
757bf1df 2295 }
808f4dfe
DM
2296
2297 return true;
757bf1df
DM
2298}
2299
/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.
   As a side effect, adds the constraint PTR_SVAL != NULL (see below).  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
  const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
  m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);

  switch (ptr_sval->get_kind ())
    {
    default:
      /* Fall through to the symbolic-region fallback below.  */
      break;

    case SK_REGION:
      {
	/* A pointer to a known region: dereference it directly.  */
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmentic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	/* Dereferencing a poisoned (e.g. uninitialized) pointer:
	   warn via CTXT if we can name the pointer.  */
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (new poisoned_value_diagnostic (ptr, pkind, NULL));
	      }
	  }
      }
      break;
    }

  /* Fallback: a symbolic region based on the pointer value.  */
  return m_mgr->get_symbolic_region (ptr_sval);
}
2379
d3b1ef7a
DM
2380/* Attempt to get BITS within any value of REG, as TYPE.
2381 In particular, extract values from compound_svalues for the case
2382 where there's a concrete binding at BITS.
9faf8348
DM
2383 Return an unknown svalue if we can't handle the given case.
2384 Use CTXT to report any warnings associated with reading from REG. */
d3b1ef7a
DM
2385
2386const svalue *
2387region_model::get_rvalue_for_bits (tree type,
2388 const region *reg,
9faf8348
DM
2389 const bit_range &bits,
2390 region_model_context *ctxt) const
d3b1ef7a 2391{
9faf8348 2392 const svalue *sval = get_store_value (reg, ctxt);
e61ffa20 2393 return m_mgr->get_or_create_bits_within (type, bits, sval);
d3b1ef7a
DM
2394}
2395
/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory (const objects, functions, labels).  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    auto_diagnostic_group d;
    bool warned;
    /* Tailor the wording to the kind of region being written to.  */
    switch (m_reg->get_kind ())
      {
      default:
	warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
			     "write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
			     "write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = warning_at (rich_loc, OPT_Wanalyzer_write_to_const,
			     "write to label %qE", m_decl);
	break;
      }
    /* Point at the declaration as a follow-up note, but only if the
       warning itself was emitted.  */
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    /* Use wording matching that of "emit" above.  */
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;  /* The region being written to.  */
  tree m_decl;          /* The decl underlying that region.  */
};
2459
/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const FINAL OVERRIDE
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    return warning_at (rich_loc, OPT_Wanalyzer_write_to_string_literal,
		       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;  /* The string-literal region being written to.  */
};
2497
/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to
   (functions, labels, const globals, string literals).  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Classify based on the outermost region containing DEST_REG.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (new write_to_const_diagnostic (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
      break;
    }
}
2545
9a2c9579
DM
2546/* Get the capacity of REG in bytes. */
2547
2548const svalue *
2549region_model::get_capacity (const region *reg) const
2550{
2551 switch (reg->get_kind ())
2552 {
2553 default:
2554 break;
2555 case RK_DECL:
2556 {
2557 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2558 tree decl = decl_reg->get_decl ();
2559 if (TREE_CODE (decl) == SSA_NAME)
2560 {
2561 tree type = TREE_TYPE (decl);
2562 tree size = TYPE_SIZE (type);
2563 return get_rvalue (size, NULL);
2564 }
2565 else
2566 {
2567 tree size = decl_init_size (decl, false);
2568 if (size)
2569 return get_rvalue (size, NULL);
2570 }
2571 }
2572 break;
e61ffa20
DM
2573 case RK_SIZED:
2574 /* Look through sized regions to get at the capacity
2575 of the underlying regions. */
2576 return get_capacity (reg->get_parent_region ());
9a2c9579
DM
2577 }
2578
2579 if (const svalue *recorded = get_dynamic_extents (reg))
2580 return recorded;
2581
2582 return m_mgr->get_or_create_unknown_svalue (sizetype);
2583}
2584
/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.
   Common entry point for check_region_for_read/check_region_for_write.  */

void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Taint checking applies to both reads and writes.  */
  check_region_for_taint (reg, dir, ctxt);

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
}
2611
/* If CTXT is non-NULL, use it to warn about any problems writing to REG.
   Thin wrapper around check_region_access with DIR_WRITE.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}
2620
/* If CTXT is non-NULL, use it to warn about any problems reading from REG.
   Thin wrapper around check_region_access with DIR_READ.  */

void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  check_region_access (src_reg, DIR_READ, ctxt);
}
2629
808f4dfe 2630/* Set the value of the region given by LHS_REG to the value given
9faf8348
DM
2631 by RHS_SVAL.
2632 Use CTXT to report any warnings associated with writing to LHS_REG. */
757bf1df 2633
808f4dfe
DM
2634void
2635region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3175d40f 2636 region_model_context *ctxt)
757bf1df 2637{
808f4dfe
DM
2638 gcc_assert (lhs_reg);
2639 gcc_assert (rhs_sval);
2640
9faf8348 2641 check_region_for_write (lhs_reg, ctxt);
3175d40f 2642
808f4dfe 2643 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
e61ffa20 2644 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
2645}
2646
808f4dfe 2647/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
2648
2649void
808f4dfe 2650region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 2651{
808f4dfe
DM
2652 const region *lhs_reg = get_lvalue (lhs, ctxt);
2653 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
2654 gcc_assert (lhs_reg);
2655 gcc_assert (rhs_sval);
2656 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
2657}
2658
/* Remove all bindings overlapping REG within the store.
   Delegates to the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}
2666
/* Remove any bindings for REG within the store.
   Delegates to the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}
2674
/* Fill REG with SVAL.
   Delegates to the store.  */

void
region_model::fill_region (const region *reg, const svalue *sval)
{
  m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
}
2682
/* Zero-fill REG.
   Delegates to the store.  */

void
region_model::zero_fill_region (const region *reg)
{
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}
2690
/* Mark REG as having unknown content.
   UNCERTAINTY, if non-NULL, is passed through to the store
   (used for tracking values that may have changed).  */

void
region_model::mark_region_as_unknown (const region *reg,
				      uncertainty_t *uncertainty)
{
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
				  uncertainty);
}
2700
808f4dfe 2701/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
2702 this model. */
2703
2704tristate
808f4dfe
DM
2705region_model::eval_condition (const svalue *lhs,
2706 enum tree_code op,
2707 const svalue *rhs) const
757bf1df 2708{
e978955d
DM
2709 /* For now, make no attempt to capture constraints on floating-point
2710 values. */
2711 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
2712 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
2713 return tristate::unknown ();
2714
808f4dfe 2715 tristate ts = eval_condition_without_cm (lhs, op, rhs);
757bf1df
DM
2716 if (ts.is_known ())
2717 return ts;
2718
2719 /* Otherwise, try constraints. */
808f4dfe 2720 return m_constraints->eval_condition (lhs, op, rhs);
757bf1df
DM
2721}
2722
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model, without resorting to the constraint_manager.

   This is exposed so that impl_region_model_context::on_state_leak can
   check for equality part-way through region_model::purge_unused_svalues
   without risking creating new ECs.  */

tristate
region_model::eval_condition_without_cm (const svalue *lhs,
					 enum tree_code op,
					 const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* See what we know based on the values.  */

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  /* If we have a pair of constants, compare them.  */
  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practise and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts
		  = eval_condition_without_cm (binop->get_arg0 (),
					       op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* Widening svalues (from loop analysis) know their start value and
     direction; that can be enough to decide comparisons vs constants.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  return tristate::TS_UNKNOWN;
}
2866
2867/* Subroutine of region_model::eval_condition_without_cm, for rejecting
2868 equality of INIT_VAL(PARM) with &LOCAL. */
2869
2870tristate
2871region_model::compare_initial_and_pointer (const initial_svalue *init,
2872 const region_svalue *ptr) const
2873{
2874 const region *pointee = ptr->get_pointee ();
2875
2876 /* If we have a pointer to something within a stack frame, it can't be the
2877 initial value of a param. */
2878 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
2879 if (init->initial_value_of_param_p ())
2880 return tristate::TS_FALSE;
757bf1df
DM
2881
2882 return tristate::TS_UNKNOWN;
2883}
2884
48e8a7a6
DM
2885/* Handle various constraints of the form:
2886 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
2887 OP : == or !=
2888 RHS: zero
2889 and (with a cast):
2890 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
2891 OP : == or !=
2892 RHS: zero
2893 by adding constraints for INNER_LHS INNEROP INNER_RHS.
2894
2895 Return true if this function can fully handle the constraint; if
2896 so, add the implied constraint(s) and write true to *OUT if they
2897 are consistent with existing constraints, or write false to *OUT
2898 if they contradicts existing constraints.
2899
2900 Return false for cases that this function doeesn't know how to handle.
2901
2902 For example, if we're checking a stored conditional, we'll have
2903 something like:
2904 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
2905 OP : NE_EXPR
2906 RHS: zero
2907 which this function can turn into an add_constraint of:
2908 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
2909
2910 Similarly, optimized && and || conditionals lead to e.g.
2911 if (p && q)
2912 becoming gimple like this:
2913 _1 = p_6 == 0B;
2914 _2 = q_8 == 0B
2915 _3 = _1 | _2
2916 On the "_3 is false" branch we can have constraints of the form:
2917 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2918 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
2919 == 0
2920 which implies that both _1 and _2 are false,
2921 which this function can turn into a pair of add_constraints of
2922 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
2923 and:
2924 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
2925
2926bool
2927region_model::add_constraints_from_binop (const svalue *outer_lhs,
2928 enum tree_code outer_op,
2929 const svalue *outer_rhs,
2930 bool *out,
2931 region_model_context *ctxt)
2932{
2933 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
2934 outer_lhs = cast;
2935 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
2936 if (!binop_sval)
2937 return false;
2938 if (!outer_rhs->all_zeroes_p ())
2939 return false;
2940
2941 const svalue *inner_lhs = binop_sval->get_arg0 ();
2942 enum tree_code inner_op = binop_sval->get_op ();
2943 const svalue *inner_rhs = binop_sval->get_arg1 ();
2944
2945 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
2946 return false;
2947
2948 /* We have either
2949 - "OUTER_LHS != false" (i.e. OUTER is true), or
2950 - "OUTER_LHS == false" (i.e. OUTER is false). */
2951 bool is_true = outer_op == NE_EXPR;
2952
2953 switch (inner_op)
2954 {
2955 default:
2956 return false;
2957
2958 case EQ_EXPR:
2959 case NE_EXPR:
2960 {
2961 /* ...and "(inner_lhs OP inner_rhs) == 0"
2962 then (inner_lhs OP inner_rhs) must have the same
2963 logical value as LHS. */
2964 if (!is_true)
2965 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
2966 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
2967 return true;
2968 }
2969 break;
2970
2971 case BIT_AND_EXPR:
2972 if (is_true)
2973 {
2974 /* ...and "(inner_lhs & inner_rhs) != 0"
2975 then both inner_lhs and inner_rhs must be true. */
2976 const svalue *false_sval
2977 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2978 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
2979 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
2980 *out = sat1 && sat2;
2981 return true;
2982 }
2983 return false;
2984
2985 case BIT_IOR_EXPR:
2986 if (!is_true)
2987 {
2988 /* ...and "(inner_lhs | inner_rhs) == 0"
2989 i.e. "(inner_lhs | inner_rhs)" is false
2990 then both inner_lhs and inner_rhs must be false. */
2991 const svalue *false_sval
2992 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
2993 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
2994 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
2995 *out = sat1 && sat2;
2996 return true;
2997 }
2998 return false;
2999 }
3000}
3001
757bf1df
DM
3002/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3003 If it is consistent with existing constraints, add it, and return true.
3004 Return false if it contradicts existing constraints.
3005 Use CTXT for reporting any diagnostics associated with the accesses. */
3006
3007bool
3008region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3009 region_model_context *ctxt)
3010{
e978955d
DM
3011 /* For now, make no attempt to capture constraints on floating-point
3012 values. */
3013 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3014 return true;
3015
808f4dfe
DM
3016 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3017 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 3018
48e8a7a6
DM
3019 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3020}
3021
3022/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3023 If it is consistent with existing constraints, add it, and return true.
3024 Return false if it contradicts existing constraints.
3025 Use CTXT for reporting any diagnostics associated with the accesses. */
3026
3027bool
3028region_model::add_constraint (const svalue *lhs,
3029 enum tree_code op,
3030 const svalue *rhs,
3031 region_model_context *ctxt)
3032{
3033 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
3034
3035 /* If we already have the condition, do nothing. */
3036 if (t_cond.is_true ())
3037 return true;
3038
3039 /* Reject a constraint that would contradict existing knowledge, as
3040 unsatisfiable. */
3041 if (t_cond.is_false ())
3042 return false;
3043
48e8a7a6
DM
3044 bool out;
3045 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3046 return out;
757bf1df 3047
c4b8f373
DM
3048 /* Attempt to store the constraint. */
3049 if (!m_constraints->add_constraint (lhs, op, rhs))
3050 return false;
757bf1df
DM
3051
3052 /* Notify the context, if any. This exists so that the state machines
3053 in a program_state can be notified about the condition, and so can
3054 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3055 when synthesizing constraints as above. */
3056 if (ctxt)
3057 ctxt->on_condition (lhs, op, rhs);
3058
9a2c9579
DM
3059 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3060 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
3061 if (tree rhs_cst = rhs->maybe_get_constant ())
3062 if (op == EQ_EXPR && zerop (rhs_cst))
3063 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3064 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 3065
757bf1df
DM
3066 return true;
3067}
3068
84fb3546
DM
3069/* As above, but when returning false, if OUT is non-NULL, write a
3070 new rejected_constraint to *OUT. */
3071
3072bool
3073region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3074 region_model_context *ctxt,
3075 rejected_constraint **out)
3076{
3077 bool sat = add_constraint (lhs, op, rhs, ctxt);
3078 if (!sat && out)
8ca7fa84 3079 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
3080 return sat;
3081}
3082
757bf1df
DM
3083/* Determine what is known about the condition "LHS OP RHS" within
3084 this model.
3085 Use CTXT for reporting any diagnostics associated with the accesses. */
3086
3087tristate
3088region_model::eval_condition (tree lhs,
3089 enum tree_code op,
3090 tree rhs,
3091 region_model_context *ctxt)
3092{
e978955d
DM
3093 /* For now, make no attempt to model constraints on floating-point
3094 values. */
3095 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3096 return tristate::unknown ();
3097
757bf1df
DM
3098 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3099}
3100
467a4820
DM
3101/* Implementation of region_model::get_representative_path_var.
3102 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
3103 Use VISITED to prevent infinite mutual recursion with the overload for
3104 regions. */
757bf1df 3105
808f4dfe 3106path_var
467a4820
DM
3107region_model::get_representative_path_var_1 (const svalue *sval,
3108 svalue_set *visited) const
757bf1df 3109{
467a4820 3110 gcc_assert (sval);
757bf1df 3111
808f4dfe
DM
3112 /* Prevent infinite recursion. */
3113 if (visited->contains (sval))
3114 return path_var (NULL_TREE, 0);
3115 visited->add (sval);
757bf1df 3116
467a4820
DM
3117 /* Handle casts by recursion into get_representative_path_var. */
3118 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3119 {
3120 path_var result = get_representative_path_var (cast_sval, visited);
3121 tree orig_type = sval->get_type ();
3122 /* If necessary, wrap the result in a cast. */
3123 if (result.m_tree && orig_type)
3124 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3125 return result;
3126 }
3127
808f4dfe
DM
3128 auto_vec<path_var> pvs;
3129 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 3130
808f4dfe
DM
3131 if (tree cst = sval->maybe_get_constant ())
3132 pvs.safe_push (path_var (cst, 0));
757bf1df 3133
90f7c300 3134 /* Handle string literals and various other pointers. */
808f4dfe
DM
3135 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3136 {
3137 const region *reg = ptr_sval->get_pointee ();
3138 if (path_var pv = get_representative_path_var (reg, visited))
3139 return path_var (build1 (ADDR_EXPR,
467a4820 3140 sval->get_type (),
808f4dfe
DM
3141 pv.m_tree),
3142 pv.m_stack_depth);
3143 }
3144
3145 /* If we have a sub_svalue, look for ways to represent the parent. */
3146 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 3147 {
808f4dfe
DM
3148 const svalue *parent_sval = sub_sval->get_parent ();
3149 const region *subreg = sub_sval->get_subregion ();
3150 if (path_var parent_pv
3151 = get_representative_path_var (parent_sval, visited))
3152 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3153 return path_var (build3 (COMPONENT_REF,
3154 sval->get_type (),
3155 parent_pv.m_tree,
3156 field_reg->get_field (),
3157 NULL_TREE),
3158 parent_pv.m_stack_depth);
90f7c300
DM
3159 }
3160
b9365b93
DM
3161 /* Handle binops. */
3162 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3163 if (path_var lhs_pv
3164 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3165 if (path_var rhs_pv
3166 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3167 return path_var (build2 (binop_sval->get_op (),
3168 sval->get_type (),
3169 lhs_pv.m_tree, rhs_pv.m_tree),
3170 lhs_pv.m_stack_depth);
3171
808f4dfe
DM
3172 if (pvs.length () < 1)
3173 return path_var (NULL_TREE, 0);
3174
3175 pvs.qsort (readability_comparator);
3176 return pvs[0];
757bf1df
DM
3177}
3178
467a4820
DM
3179/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3180 Use VISITED to prevent infinite mutual recursion with the overload for
3181 regions
3182
3183 This function defers to get_representative_path_var_1 to do the work;
3184 it adds verification that get_representative_path_var_1 returned a tree
3185 of the correct type. */
3186
3187path_var
3188region_model::get_representative_path_var (const svalue *sval,
3189 svalue_set *visited) const
3190{
3191 if (sval == NULL)
3192 return path_var (NULL_TREE, 0);
3193
3194 tree orig_type = sval->get_type ();
3195
3196 path_var result = get_representative_path_var_1 (sval, visited);
3197
3198 /* Verify that the result has the same type as SVAL, if any. */
3199 if (result.m_tree && orig_type)
3200 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3201
3202 return result;
3203}
3204
3205/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3206
3207 Strip off any top-level cast, to avoid messages like
3208 double-free of '(void *)ptr'
3209 from analyzer diagnostics. */
757bf1df 3210
808f4dfe
DM
3211tree
3212region_model::get_representative_tree (const svalue *sval) const
757bf1df 3213{
808f4dfe 3214 svalue_set visited;
467a4820
DM
3215 tree expr = get_representative_path_var (sval, &visited).m_tree;
3216
3217 /* Strip off any top-level cast. */
3218 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 3219 expr = TREE_OPERAND (expr, 0);
467a4820 3220
e4bb1bd6 3221 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
3222}
3223
467a4820
DM
3224/* Implementation of region_model::get_representative_path_var.
3225
3226 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
3227 the NULL path_var.
3228 For example, a region for a field of a local would be a path_var
3229 wrapping a COMPONENT_REF.
3230 Use VISITED to prevent infinite mutual recursion with the overload for
3231 svalues. */
757bf1df 3232
808f4dfe 3233path_var
467a4820
DM
3234region_model::get_representative_path_var_1 (const region *reg,
3235 svalue_set *visited) const
808f4dfe
DM
3236{
3237 switch (reg->get_kind ())
757bf1df 3238 {
808f4dfe
DM
3239 default:
3240 gcc_unreachable ();
e516294a 3241
808f4dfe
DM
3242 case RK_FRAME:
3243 case RK_GLOBALS:
3244 case RK_CODE:
3245 case RK_HEAP:
3246 case RK_STACK:
3247 case RK_ROOT:
3248 /* Regions that represent memory spaces are not expressible as trees. */
3249 return path_var (NULL_TREE, 0);
757bf1df 3250
808f4dfe 3251 case RK_FUNCTION:
884d9141 3252 {
808f4dfe
DM
3253 const function_region *function_reg
3254 = as_a <const function_region *> (reg);
3255 return path_var (function_reg->get_fndecl (), 0);
884d9141 3256 }
808f4dfe 3257 case RK_LABEL:
9e78634c
DM
3258 {
3259 const label_region *label_reg = as_a <const label_region *> (reg);
3260 return path_var (label_reg->get_label (), 0);
3261 }
90f7c300 3262
808f4dfe
DM
3263 case RK_SYMBOLIC:
3264 {
3265 const symbolic_region *symbolic_reg
3266 = as_a <const symbolic_region *> (reg);
3267 const svalue *pointer = symbolic_reg->get_pointer ();
3268 path_var pointer_pv = get_representative_path_var (pointer, visited);
3269 if (!pointer_pv)
3270 return path_var (NULL_TREE, 0);
3271 tree offset = build_int_cst (pointer->get_type (), 0);
3272 return path_var (build2 (MEM_REF,
3273 reg->get_type (),
3274 pointer_pv.m_tree,
3275 offset),
3276 pointer_pv.m_stack_depth);
3277 }
3278 case RK_DECL:
3279 {
3280 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3281 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3282 }
3283 case RK_FIELD:
3284 {
3285 const field_region *field_reg = as_a <const field_region *> (reg);
3286 path_var parent_pv
3287 = get_representative_path_var (reg->get_parent_region (), visited);
3288 if (!parent_pv)
3289 return path_var (NULL_TREE, 0);
3290 return path_var (build3 (COMPONENT_REF,
3291 reg->get_type (),
3292 parent_pv.m_tree,
3293 field_reg->get_field (),
3294 NULL_TREE),
3295 parent_pv.m_stack_depth);
3296 }
757bf1df 3297
808f4dfe
DM
3298 case RK_ELEMENT:
3299 {
3300 const element_region *element_reg
3301 = as_a <const element_region *> (reg);
3302 path_var parent_pv
3303 = get_representative_path_var (reg->get_parent_region (), visited);
3304 if (!parent_pv)
3305 return path_var (NULL_TREE, 0);
3306 path_var index_pv
3307 = get_representative_path_var (element_reg->get_index (), visited);
3308 if (!index_pv)
3309 return path_var (NULL_TREE, 0);
3310 return path_var (build4 (ARRAY_REF,
3311 reg->get_type (),
3312 parent_pv.m_tree, index_pv.m_tree,
3313 NULL_TREE, NULL_TREE),
3314 parent_pv.m_stack_depth);
3315 }
757bf1df 3316
808f4dfe 3317 case RK_OFFSET:
757bf1df 3318 {
808f4dfe
DM
3319 const offset_region *offset_reg
3320 = as_a <const offset_region *> (reg);
3321 path_var parent_pv
3322 = get_representative_path_var (reg->get_parent_region (), visited);
3323 if (!parent_pv)
3324 return path_var (NULL_TREE, 0);
3325 path_var offset_pv
3326 = get_representative_path_var (offset_reg->get_byte_offset (),
3327 visited);
29f5db8e 3328 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 3329 return path_var (NULL_TREE, 0);
29f5db8e
DM
3330 tree addr_parent = build1 (ADDR_EXPR,
3331 build_pointer_type (reg->get_type ()),
3332 parent_pv.m_tree);
808f4dfe
DM
3333 return path_var (build2 (MEM_REF,
3334 reg->get_type (),
29f5db8e 3335 addr_parent, offset_pv.m_tree),
808f4dfe 3336 parent_pv.m_stack_depth);
757bf1df 3337 }
757bf1df 3338
e61ffa20
DM
3339 case RK_SIZED:
3340 return path_var (NULL_TREE, 0);
3341
808f4dfe
DM
3342 case RK_CAST:
3343 {
3344 path_var parent_pv
3345 = get_representative_path_var (reg->get_parent_region (), visited);
3346 if (!parent_pv)
3347 return path_var (NULL_TREE, 0);
3348 return path_var (build1 (NOP_EXPR,
3349 reg->get_type (),
3350 parent_pv.m_tree),
3351 parent_pv.m_stack_depth);
3352 }
757bf1df 3353
808f4dfe
DM
3354 case RK_HEAP_ALLOCATED:
3355 case RK_ALLOCA:
3356 /* No good way to express heap-allocated/alloca regions as trees. */
3357 return path_var (NULL_TREE, 0);
757bf1df 3358
808f4dfe
DM
3359 case RK_STRING:
3360 {
3361 const string_region *string_reg = as_a <const string_region *> (reg);
3362 return path_var (string_reg->get_string_cst (), 0);
3363 }
757bf1df 3364
808f4dfe
DM
3365 case RK_UNKNOWN:
3366 return path_var (NULL_TREE, 0);
3367 }
757bf1df
DM
3368}
3369
467a4820
DM
3370/* Attempt to return a path_var that represents REG, or return
3371 the NULL path_var.
3372 For example, a region for a field of a local would be a path_var
3373 wrapping a COMPONENT_REF.
3374 Use VISITED to prevent infinite mutual recursion with the overload for
3375 svalues.
3376
3377 This function defers to get_representative_path_var_1 to do the work;
3378 it adds verification that get_representative_path_var_1 returned a tree
3379 of the correct type. */
3380
3381path_var
3382region_model::get_representative_path_var (const region *reg,
3383 svalue_set *visited) const
3384{
3385 path_var result = get_representative_path_var_1 (reg, visited);
3386
3387 /* Verify that the result has the same type as REG, if any. */
3388 if (result.m_tree && reg->get_type ())
3389 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
3390
3391 return result;
3392}
3393
757bf1df
DM
3394/* Update this model for any phis in SNODE, assuming we came from
3395 LAST_CFG_SUPEREDGE. */
3396
3397void
3398region_model::update_for_phis (const supernode *snode,
3399 const cfg_superedge *last_cfg_superedge,
3400 region_model_context *ctxt)
3401{
3402 gcc_assert (last_cfg_superedge);
3403
e0a7a675
DM
3404 /* Copy this state and pass it to handle_phi so that all of the phi stmts
3405 are effectively handled simultaneously. */
3406 const region_model old_state (*this);
3407
757bf1df
DM
3408 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
3409 !gsi_end_p (gpi); gsi_next (&gpi))
3410 {
3411 gphi *phi = gpi.phi ();
3412
3413 tree src = last_cfg_superedge->get_phi_arg (phi);
3414 tree lhs = gimple_phi_result (phi);
3415
e0a7a675
DM
3416 /* Update next_state based on phi and old_state. */
3417 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
3418 }
3419}
3420
3421/* Attempt to update this model for taking EDGE (where the last statement
3422 was LAST_STMT), returning true if the edge can be taken, false
3423 otherwise.
84fb3546
DM
3424 When returning false, if OUT is non-NULL, write a new rejected_constraint
3425 to it.
757bf1df
DM
3426
3427 For CFG superedges where LAST_STMT is a conditional or a switch
3428 statement, attempt to add the relevant conditions for EDGE to this
3429 model, returning true if they are feasible, or false if they are
3430 impossible.
3431
3432 For call superedges, push frame information and store arguments
3433 into parameters.
3434
3435 For return superedges, pop frame information and store return
3436 values into any lhs.
3437
3438 Rejection of call/return superedges happens elsewhere, in
3439 program_point::on_edge (i.e. based on program point, rather
3440 than program state). */
3441
3442bool
3443region_model::maybe_update_for_edge (const superedge &edge,
3444 const gimple *last_stmt,
84fb3546
DM
3445 region_model_context *ctxt,
3446 rejected_constraint **out)
757bf1df
DM
3447{
3448 /* Handle frame updates for interprocedural edges. */
3449 switch (edge.m_kind)
3450 {
3451 default:
3452 break;
3453
3454 case SUPEREDGE_CALL:
3455 {
3456 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
3457 update_for_call_superedge (*call_edge, ctxt);
3458 }
3459 break;
3460
3461 case SUPEREDGE_RETURN:
3462 {
3463 const return_superedge *return_edge
3464 = as_a <const return_superedge *> (&edge);
3465 update_for_return_superedge (*return_edge, ctxt);
3466 }
3467 break;
3468
3469 case SUPEREDGE_INTRAPROCEDURAL_CALL:
3470 {
3471 const callgraph_superedge *cg_sedge
3472 = as_a <const callgraph_superedge *> (&edge);
3473 update_for_call_summary (*cg_sedge, ctxt);
3474 }
3475 break;
3476 }
3477
3478 if (last_stmt == NULL)
3479 return true;
3480
3481 /* Apply any constraints for conditionals/switch statements. */
3482
3483 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
3484 {
3485 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 3486 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
3487 }
3488
3489 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
3490 {
3491 const switch_cfg_superedge *switch_sedge
3492 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
3493 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
3494 ctxt, out);
757bf1df
DM
3495 }
3496
1690a839
DM
3497 /* Apply any constraints due to an exception being thrown. */
3498 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
3499 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 3500 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 3501
757bf1df
DM
3502 return true;
3503}
3504
3505/* Push a new frame_region on to the stack region.
3506 Populate the frame_region with child regions for the function call's
3507 parameters, using values from the arguments at the callsite in the
3508 caller's frame. */
3509
3510void
aef703cf 3511region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
3512 region_model_context *ctxt,
3513 function *callee)
757bf1df 3514{
808f4dfe 3515 /* Build a vec of argument svalues, using the current top
757bf1df 3516 frame for resolving tree expressions. */
808f4dfe 3517 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
3518
3519 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
3520 {
3521 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 3522 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
3523 }
3524
e92d0ff6
AS
3525 if(!callee)
3526 {
3527 /* Get the function * from the gcall. */
3528 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
3529 callee = DECL_STRUCT_FUNCTION (fn_decl);
3530 }
3531
3532 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
3533}
3534
a96f1c38
DM
3535/* Pop the top-most frame_region from the stack, and copy the return
3536 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 3537 the call (if any). */
aef703cf 3538
757bf1df 3539void
aef703cf
AS
3540region_model::update_for_return_gcall (const gcall *call_stmt,
3541 region_model_context *ctxt)
757bf1df 3542{
a96f1c38 3543 /* Get the region for the result of the call, within the caller frame. */
808f4dfe 3544 const region *result_dst_reg = NULL;
757bf1df
DM
3545 tree lhs = gimple_call_lhs (call_stmt);
3546 if (lhs)
a96f1c38
DM
3547 {
3548 /* Normally we access the top-level frame, which is:
aef703cf
AS
3549 path_var (expr, get_stack_depth () - 1)
3550 whereas here we need the caller frame, hence "- 2" here. */
808f4dfe
DM
3551 gcc_assert (get_stack_depth () >= 2);
3552 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
aef703cf 3553 ctxt);
a96f1c38
DM
3554 }
3555
808f4dfe 3556 pop_frame (result_dst_reg, NULL, ctxt);
757bf1df
DM
3557}
3558
aef703cf
AS
3559/* Extract calling information from the superedge and update the model for the
3560 call */
3561
3562void
3563region_model::update_for_call_superedge (const call_superedge &call_edge,
3564 region_model_context *ctxt)
3565{
3566 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 3567 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
3568}
3569
3570/* Extract calling information from the return superedge and update the model
3571 for the returning call */
3572
3573void
3574region_model::update_for_return_superedge (const return_superedge &return_edge,
3575 region_model_context *ctxt)
3576{
3577 const gcall *call_stmt = return_edge.get_call_stmt ();
3578 update_for_return_gcall (call_stmt, ctxt);
3579}
3580
757bf1df
DM
3581/* Update this region_model with a summary of the effect of calling
3582 and returning from CG_SEDGE.
3583
3584 TODO: Currently this is extremely simplistic: we merely set the
3585 return value to "unknown". A proper implementation would e.g. update
3586 sm-state, and presumably be reworked to support multiple outcomes. */
3587
3588void
3589region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
3590 region_model_context *ctxt)
3591{
3592 /* For now, set any return value to "unknown". */
3593 const gcall *call_stmt = cg_sedge.get_call_stmt ();
3594 tree lhs = gimple_call_lhs (call_stmt);
3595 if (lhs)
3a66c289
DM
3596 mark_region_as_unknown (get_lvalue (lhs, ctxt),
3597 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
3598
3599 // TODO: actually implement some kind of summary here
3600}
3601
3602/* Given a true or false edge guarded by conditional statement COND_STMT,
3603 determine appropriate constraints for the edge to be taken.
3604
3605 If they are feasible, add the constraints and return true.
3606
3607 Return false if the constraints contradict existing knowledge
84fb3546
DM
3608 (and so the edge should not be taken).
3609 When returning false, if OUT is non-NULL, write a new rejected_constraint
3610 to it. */
757bf1df
DM
3611
3612bool
3613region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
3614 const gcond *cond_stmt,
84fb3546
DM
3615 region_model_context *ctxt,
3616 rejected_constraint **out)
757bf1df
DM
3617{
3618 ::edge cfg_edge = sedge.get_cfg_edge ();
3619 gcc_assert (cfg_edge != NULL);
3620 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
3621
3622 enum tree_code op = gimple_cond_code (cond_stmt);
3623 tree lhs = gimple_cond_lhs (cond_stmt);
3624 tree rhs = gimple_cond_rhs (cond_stmt);
3625 if (cfg_edge->flags & EDGE_FALSE_VALUE)
3626 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 3627 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
3628}
3629
3630/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
3631 for the edge to be taken.
3632
3633 If they are feasible, add the constraints and return true.
3634
3635 Return false if the constraints contradict existing knowledge
84fb3546
DM
3636 (and so the edge should not be taken).
3637 When returning false, if OUT is non-NULL, write a new rejected_constraint
3638 to it. */
757bf1df
DM
3639
3640bool
3641region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
3642 const gswitch *switch_stmt,
84fb3546
DM
3643 region_model_context *ctxt,
3644 rejected_constraint **out)
757bf1df 3645{
8ca7fa84
DM
3646 bounded_ranges_manager *ranges_mgr = get_range_manager ();
3647 const bounded_ranges *all_cases_ranges
3648 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
757bf1df 3649 tree index = gimple_switch_index (switch_stmt);
8ca7fa84
DM
3650 const svalue *index_sval = get_rvalue (index, ctxt);
3651 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
3652 if (!sat && out)
3653 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
3654 return sat;
757bf1df
DM
3655}
3656
1690a839
DM
3657/* Apply any constraints due to an exception being thrown at LAST_STMT.
3658
3659 If they are feasible, add the constraints and return true.
3660
3661 Return false if the constraints contradict existing knowledge
84fb3546
DM
3662 (and so the edge should not be taken).
3663 When returning false, if OUT is non-NULL, write a new rejected_constraint
3664 to it. */
1690a839
DM
3665
3666bool
3667region_model::apply_constraints_for_exception (const gimple *last_stmt,
84fb3546
DM
3668 region_model_context *ctxt,
3669 rejected_constraint **out)
1690a839
DM
3670{
3671 gcc_assert (last_stmt);
3672 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
3673 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
3674 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
3675 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
3676 {
3677 /* We have an exception thrown from operator new.
3678 Add a constraint that the result was NULL, to avoid a false
3679 leak report due to the result being lost when following
3680 the EH edge. */
3681 if (tree lhs = gimple_call_lhs (call))
84fb3546 3682 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
3683 return true;
3684 }
3685 return true;
3686}
3687
808f4dfe
DM
3688/* For use with push_frame when handling a top-level call within the analysis.
3689 PARAM has a defined but unknown initial value.
3690 Anything it points to has escaped, since the calling context "knows"
3691 the pointer, and thus calls to unknown functions could read/write into
3692 the region. */
757bf1df
DM
3693
3694void
808f4dfe 3695region_model::on_top_level_param (tree param,
3a25f345 3696 region_model_context *ctxt)
757bf1df 3697{
808f4dfe 3698 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 3699 {
808f4dfe
DM
3700 const region *param_reg = get_lvalue (param, ctxt);
3701 const svalue *init_ptr_sval
3702 = m_mgr->get_or_create_initial_value (param_reg);
3703 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
3704 m_store.mark_as_escaped (pointee_reg);
5eae0ac7 3705 }
757bf1df
DM
3706}
3707
808f4dfe
DM
3708/* Update this region_model to reflect pushing a frame onto the stack
3709 for a call to FUN.
757bf1df 3710
808f4dfe
DM
3711 If ARG_SVALS is non-NULL, use it to populate the parameters
3712 in the new frame.
3713 Otherwise, the params have their initial_svalues.
757bf1df 3714
808f4dfe 3715 Return the frame_region for the new frame. */
757bf1df 3716
808f4dfe
DM
3717const region *
3718region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
3719 region_model_context *ctxt)
757bf1df 3720{
808f4dfe
DM
3721 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
3722 if (arg_svals)
757bf1df 3723 {
808f4dfe
DM
3724 /* Arguments supplied from a caller frame. */
3725 tree fndecl = fun->decl;
3726 unsigned idx = 0;
3727 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3728 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 3729 {
808f4dfe
DM
3730 /* If there's a mismatching declaration, the call stmt might
3731 not have enough args. Handle this case by leaving the
3732 rest of the params as uninitialized. */
3733 if (idx >= arg_svals->length ())
3734 break;
294b6da2
DM
3735 tree parm_lval = iter_parm;
3736 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
3737 parm_lval = parm_default_ssa;
3738 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 3739 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 3740 set_value (parm_reg, arg_sval, ctxt);
757bf1df 3741 }
757bf1df 3742 }
808f4dfe 3743 else
757bf1df 3744 {
808f4dfe
DM
3745 /* Otherwise we have a top-level call within the analysis. The params
3746 have defined but unknown initial values.
3747 Anything they point to has escaped. */
3748 tree fndecl = fun->decl;
3749 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
3750 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 3751 {
294b6da2 3752 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
808f4dfe 3753 on_top_level_param (parm_default_ssa, ctxt);
294b6da2
DM
3754 else
3755 on_top_level_param (iter_parm, ctxt);
757bf1df
DM
3756 }
3757 }
757bf1df 3758
808f4dfe 3759 return m_current_frame;
757bf1df
DM
3760}
3761
808f4dfe
DM
3762/* Get the function of the top-most frame in this region_model's stack.
3763 There must be such a frame. */
757bf1df 3764
808f4dfe
DM
3765function *
3766region_model::get_current_function () const
757bf1df 3767{
808f4dfe
DM
3768 const frame_region *frame = get_current_frame ();
3769 gcc_assert (frame);
3770 return frame->get_function ();
757bf1df
DM
3771}
3772
808f4dfe 3773/* Pop the topmost frame_region from this region_model's stack;
757bf1df 3774
808f4dfe
DM
3775 If RESULT_DST_REG is non-null, copy any return value from the frame
3776 into RESULT_DST_REG's region.
3777 If OUT_RESULT is non-null, copy any return value from the frame
3778 into *OUT_RESULT.
757bf1df 3779
808f4dfe
DM
3780 Purge the frame region and all its descendent regions.
3781 Convert any pointers that point into such regions into
3782 POISON_KIND_POPPED_STACK svalues. */
757bf1df 3783
808f4dfe
DM
3784void
3785region_model::pop_frame (const region *result_dst_reg,
3786 const svalue **out_result,
3787 region_model_context *ctxt)
3788{
3789 gcc_assert (m_current_frame);
757bf1df 3790
808f4dfe
DM
3791 /* Evaluate the result, within the callee frame. */
3792 const frame_region *frame_reg = m_current_frame;
3793 tree fndecl = m_current_frame->get_function ()->decl;
3794 tree result = DECL_RESULT (fndecl);
3795 if (result && TREE_TYPE (result) != void_type_node)
3796 {
13ad6d9f 3797 const svalue *retval = get_rvalue (result, ctxt);
808f4dfe 3798 if (result_dst_reg)
13ad6d9f 3799 set_value (result_dst_reg, retval, ctxt);
808f4dfe 3800 if (out_result)
13ad6d9f 3801 *out_result = retval;
808f4dfe 3802 }
757bf1df 3803
808f4dfe
DM
3804 /* Pop the frame. */
3805 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 3806
808f4dfe 3807 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
3808}
3809
808f4dfe 3810/* Get the number of frames in this region_model's stack. */
757bf1df 3811
808f4dfe
DM
3812int
3813region_model::get_stack_depth () const
757bf1df 3814{
808f4dfe
DM
3815 const frame_region *frame = get_current_frame ();
3816 if (frame)
3817 return frame->get_stack_depth ();
3818 else
3819 return 0;
757bf1df
DM
3820}
3821
808f4dfe
DM
3822/* Get the frame_region with the given index within the stack.
3823 The frame_region must exist. */
757bf1df 3824
808f4dfe
DM
3825const frame_region *
3826region_model::get_frame_at_index (int index) const
757bf1df 3827{
808f4dfe
DM
3828 const frame_region *frame = get_current_frame ();
3829 gcc_assert (frame);
3830 gcc_assert (index >= 0);
3831 gcc_assert (index <= frame->get_index ());
3832 while (index != frame->get_index ())
3833 {
3834 frame = frame->get_calling_frame ();
3835 gcc_assert (frame);
3836 }
3837 return frame;
757bf1df
DM
3838}
3839
808f4dfe
DM
3840/* Unbind svalues for any regions in REG and below.
3841 Find any pointers to such regions; convert them to
9a2c9579
DM
3842 poisoned values of kind PKIND.
3843 Also purge any dynamic extents. */
757bf1df 3844
808f4dfe
DM
3845void
3846region_model::unbind_region_and_descendents (const region *reg,
3847 enum poison_kind pkind)
757bf1df 3848{
808f4dfe
DM
3849 /* Gather a set of base regions to be unbound. */
3850 hash_set<const region *> base_regs;
3851 for (store::cluster_map_t::iterator iter = m_store.begin ();
3852 iter != m_store.end (); ++iter)
757bf1df 3853 {
808f4dfe
DM
3854 const region *iter_base_reg = (*iter).first;
3855 if (iter_base_reg->descendent_of_p (reg))
3856 base_regs.add (iter_base_reg);
757bf1df 3857 }
808f4dfe
DM
3858 for (hash_set<const region *>::iterator iter = base_regs.begin ();
3859 iter != base_regs.end (); ++iter)
3860 m_store.purge_cluster (*iter);
757bf1df 3861
808f4dfe
DM
3862 /* Find any pointers to REG or its descendents; convert to poisoned. */
3863 poison_any_pointers_to_descendents (reg, pkind);
9a2c9579
DM
3864
3865 /* Purge dynamic extents of any base regions in REG and below
3866 (e.g. VLAs and alloca stack regions). */
3867 for (auto iter : m_dynamic_extents)
3868 {
3869 const region *iter_reg = iter.first;
3870 if (iter_reg->descendent_of_p (reg))
3871 unset_dynamic_extents (iter_reg);
3872 }
757bf1df
DM
3873}
3874
808f4dfe
DM
3875/* Implementation of BindingVisitor.
3876 Update the bound svalues for regions below REG to use poisoned
3877 values instead. */
757bf1df 3878
808f4dfe 3879struct bad_pointer_finder
757bf1df 3880{
808f4dfe
DM
3881 bad_pointer_finder (const region *reg, enum poison_kind pkind,
3882 region_model_manager *mgr)
3883 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
3884 {}
757bf1df 3885
808f4dfe
DM
3886 void on_binding (const binding_key *, const svalue *&sval)
3887 {
3888 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3889 {
3890 const region *ptr_dst = ptr_sval->get_pointee ();
3891 /* Poison ptrs to descendents of REG, but not to REG itself,
3892 otherwise double-free detection doesn't work (since sm-state
3893 for "free" is stored on the original ptr svalue). */
3894 if (ptr_dst->descendent_of_p (m_reg)
3895 && ptr_dst != m_reg)
3896 {
3897 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
3898 sval->get_type ());
3899 ++m_count;
3900 }
3901 }
3902 }
757bf1df 3903
808f4dfe
DM
3904 const region *m_reg;
3905 enum poison_kind m_pkind;
3906 region_model_manager *const m_mgr;
3907 int m_count;
3908};
757bf1df 3909
808f4dfe
DM
3910/* Find any pointers to REG or its descendents; convert them to
3911 poisoned values of kind PKIND.
3912 Return the number of pointers that were poisoned. */
757bf1df 3913
808f4dfe
DM
3914int
3915region_model::poison_any_pointers_to_descendents (const region *reg,
3916 enum poison_kind pkind)
3917{
3918 bad_pointer_finder bv (reg, pkind, m_mgr);
3919 m_store.for_each_binding (bv);
3920 return bv.m_count;
757bf1df
DM
3921}
3922
808f4dfe
DM
3923/* Attempt to merge THIS with OTHER_MODEL, writing the result
3924 to OUT_MODEL. Use POINT to distinguish values created as a
3925 result of merging. */
757bf1df 3926
808f4dfe
DM
3927bool
3928region_model::can_merge_with_p (const region_model &other_model,
3929 const program_point &point,
f573d351
DM
3930 region_model *out_model,
3931 const extrinsic_state *ext_state,
3932 const program_state *state_a,
3933 const program_state *state_b) const
757bf1df 3934{
808f4dfe
DM
3935 gcc_assert (out_model);
3936 gcc_assert (m_mgr == other_model.m_mgr);
3937 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 3938
808f4dfe
DM
3939 if (m_current_frame != other_model.m_current_frame)
3940 return false;
3941 out_model->m_current_frame = m_current_frame;
757bf1df 3942
f573d351
DM
3943 model_merger m (this, &other_model, point, out_model,
3944 ext_state, state_a, state_b);
757bf1df 3945
808f4dfe
DM
3946 if (!store::can_merge_p (&m_store, &other_model.m_store,
3947 &out_model->m_store, m_mgr->get_store_manager (),
3948 &m))
3949 return false;
3950
9a2c9579
DM
3951 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
3952 &out_model->m_dynamic_extents))
3953 return false;
3954
808f4dfe
DM
3955 /* Merge constraints. */
3956 constraint_manager::merge (*m_constraints,
3957 *other_model.m_constraints,
c710051a 3958 out_model->m_constraints);
757bf1df 3959
808f4dfe 3960 return true;
757bf1df
DM
3961}
3962
3963/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
3964 otherwise. */
3965
3966tree
3967region_model::get_fndecl_for_call (const gcall *call,
3968 region_model_context *ctxt)
3969{
3970 tree fn_ptr = gimple_call_fn (call);
3971 if (fn_ptr == NULL_TREE)
3972 return NULL_TREE;
808f4dfe
DM
3973 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
3974 if (const region_svalue *fn_ptr_ptr
3975 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 3976 {
808f4dfe
DM
3977 const region *reg = fn_ptr_ptr->get_pointee ();
3978 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 3979 {
808f4dfe 3980 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
3981 cgraph_node *node = cgraph_node::get (fn_decl);
3982 if (!node)
3983 return NULL_TREE;
3984 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
3985 if (ultimate_node)
3986 return ultimate_node->decl;
757bf1df
DM
3987 }
3988 }
3989
3990 return NULL_TREE;
3991}
3992
/* Callback data for append_ssa_names_cb.
   Would be much simpler to use a lambda here, if it were supported.  */

struct append_ssa_names_cb_data
{
  /* The model whose current frame filters the regions of interest.  */
  const region_model *model;
  /* Where to accumulate the matching decl_regions.  */
  auto_vec<const decl_region *> *out;
};
757bf1df 4000
808f4dfe
DM
4001/* Populate *OUT with all decl_regions for SSA names in the current
4002 frame that have clusters within the store. */
757bf1df
DM
4003
4004void
808f4dfe
DM
4005region_model::
4006get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
4007 const
757bf1df 4008{
808f4dfe
DM
4009 append_ssa_names_cb_data data;
4010 data.model = this;
4011 data.out = out;
4012 m_store.for_each_cluster (append_ssa_names_cb, &data);
757bf1df
DM
4013}
4014
808f4dfe 4015/* Implementation detail of get_ssa_name_regions_for_current_frame. */
757bf1df 4016
808f4dfe
DM
4017void
4018region_model::append_ssa_names_cb (const region *base_reg,
4019 append_ssa_names_cb_data *cb_data)
757bf1df 4020{
808f4dfe
DM
4021 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4022 return;
4023 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
757bf1df 4024 {
808f4dfe
DM
4025 if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
4026 cb_data->out->safe_push (decl_reg);
757bf1df
DM
4027 }
4028}
4029
b9365b93
DM
4030/* Return a new region describing a heap-allocated block of memory.
4031 Use CTXT to complain about tainted sizes. */
757bf1df 4032
808f4dfe 4033const region *
b9365b93
DM
4034region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
4035 region_model_context *ctxt)
757bf1df 4036{
808f4dfe 4037 const region *reg = m_mgr->create_region_for_heap_alloc ();
ea4e3218 4038 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 4039 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 4040 return reg;
757bf1df
DM
4041}
4042
808f4dfe 4043/* Return a new region describing a block of memory allocated within the
b9365b93
DM
4044 current frame.
4045 Use CTXT to complain about tainted sizes. */
757bf1df 4046
808f4dfe 4047const region *
b9365b93
DM
4048region_model::create_region_for_alloca (const svalue *size_in_bytes,
4049 region_model_context *ctxt)
757bf1df 4050{
808f4dfe 4051 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
ea4e3218 4052 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 4053 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 4054 return reg;
757bf1df
DM
4055}
4056
b9365b93
DM
/* Record that the size of REG is SIZE_IN_BYTES.
   SIZE_IN_BYTES must have a type compatible with size_type_node.
   Use CTXT (if non-NULL) to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
                                   const svalue *size_in_bytes,
                                   region_model_context *ctxt)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  /* Check for attacker-controlled allocation sizes before recording.  */
  if (ctxt)
    check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
                                  ctxt);
  m_dynamic_extents.put (reg, size_in_bytes);
}
4071
4072/* Get the recording of REG in bytes, or NULL if no dynamic size was
4073 recorded. */
4074
4075const svalue *
4076region_model::get_dynamic_extents (const region *reg) const
757bf1df 4077{
9a2c9579
DM
4078 if (const svalue * const *slot = m_dynamic_extents.get (reg))
4079 return *slot;
4080 return NULL;
4081}
4082
/* Unset any recorded dynamic size of REG.
   A no-op if no size was recorded.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  m_dynamic_extents.remove (reg);
}
4090
eafa9d96
DM
/* class noop_region_model_context : public region_model_context.  */

/* Implementation of the bifurcate vfunc for a do-nothing context:
   this context ignores state-splitting, so simply take ownership of
   INFO and discard it.  */

void
noop_region_model_context::bifurcate (custom_edge_info *info)
{
  delete info;
}
4098
/* Implementation of the terminate_path vfunc for a do-nothing
   context: deliberately empty.  */

void
noop_region_model_context::terminate_path ()
{
}
4103
808f4dfe 4104/* struct model_merger. */
757bf1df 4105
808f4dfe 4106/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
4107
4108void
808f4dfe 4109model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 4110{
808f4dfe
DM
4111 pp_string (pp, "model A:");
4112 pp_newline (pp);
4113 m_model_a->dump_to_pp (pp, simple, true);
4114 pp_newline (pp);
757bf1df 4115
808f4dfe 4116 pp_string (pp, "model B:");
757bf1df 4117 pp_newline (pp);
808f4dfe 4118 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
4119 pp_newline (pp);
4120
808f4dfe 4121 pp_string (pp, "merged model:");
757bf1df 4122 pp_newline (pp);
808f4dfe 4123 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
4124 pp_newline (pp);
4125}
4126
/* Dump a multiline representation of this merger to FP.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  pretty_printer pp;
  /* Enable %E/%T tree printing and colorization matching the global
     diagnostic context.  */
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}
4139
/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
4147
f573d351
DM
4148/* Return true if it's OK to merge SVAL with other svalues. */
4149
4150bool
4151model_merger::mergeable_svalue_p (const svalue *sval) const
4152{
4153 if (m_ext_state)
4154 {
4155 /* Reject merging svalues that have non-purgable sm-state,
4156 to avoid falsely reporting memory leaks by merging them
4157 with something else. For example, given a local var "p",
4158 reject the merger of a:
4159 store_a mapping "p" to a malloc-ed ptr
4160 with:
4161 store_b mapping "p" to a NULL ptr. */
4162 if (m_state_a)
4163 if (!m_state_a->can_purge_p (*m_ext_state, sval))
4164 return false;
4165 if (m_state_b)
4166 if (!m_state_b->can_purge_p (*m_ext_state, sval))
4167 return false;
4168 }
4169 return true;
4170}
4171
75038aa6
DM
4172} // namespace ana
4173
/* Dump RMODEL fully to stderr (i.e. without summarization).  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  /* "false" == don't summarize.  */
  rmodel.dump (false);
}
4181
8ca7fa84 4182/* class rejected_op_constraint : public rejected_constraint. */
84fb3546
DM
4183
4184void
8ca7fa84 4185rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
84fb3546
DM
4186{
4187 region_model m (m_model);
4188 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
4189 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
4190 lhs_sval->dump_to_pp (pp, true);
4191 pp_printf (pp, " %s ", op_symbol_code (m_op));
4192 rhs_sval->dump_to_pp (pp, true);
4193}
4194
8ca7fa84
DM
4195/* class rejected_ranges_constraint : public rejected_constraint. */
4196
4197void
4198rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
4199{
4200 region_model m (m_model);
4201 const svalue *sval = m.get_rvalue (m_expr, NULL);
4202 sval->dump_to_pp (pp, true);
4203 pp_string (pp, " in ");
4204 m_ranges->dump_to_pp (pp, true);
4205}
4206
/* class engine.  */

/* engine's ctor.  Pass LOGGER on to the manager.  */

engine::engine (logger *logger)
: m_mgr (logger)
{
}
4215
/* Dump the managed objects by class to LOGGER, and the per-class
   totals.  */

void
engine::log_stats (logger *logger) const
{
  m_mgr.log_stats (logger, true);
}
4223
75038aa6
DM
4224namespace ana {
4225
757bf1df
DM
4226#if CHECKING_P
4227
4228namespace selftest {
4229
8c08c983
DM
4230/* Build a constant tree of the given type from STR. */
4231
4232static tree
4233build_real_cst_from_string (tree type, const char *str)
4234{
4235 REAL_VALUE_TYPE real;
4236 real_from_string (&real, str);
4237 return build_real (type, real);
4238}
4239
4240/* Append various "interesting" constants to OUT (e.g. NaN). */
4241
4242static void
4243append_interesting_constants (auto_vec<tree> *out)
4244{
4245 out->safe_push (build_int_cst (integer_type_node, 0));
4246 out->safe_push (build_int_cst (integer_type_node, 42));
4247 out->safe_push (build_int_cst (unsigned_type_node, 0));
4248 out->safe_push (build_int_cst (unsigned_type_node, 42));
4249 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
4250 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
4251 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
4252 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
4253 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
4254 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
4255 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
4256 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
4257}
4258
4259/* Verify that tree_cmp is a well-behaved comparator for qsort, even
4260 if the underlying constants aren't comparable. */
4261
4262static void
4263test_tree_cmp_on_constants ()
4264{
4265 auto_vec<tree> csts;
4266 append_interesting_constants (&csts);
4267
4268 /* Try sorting every triple. */
4269 const unsigned num = csts.length ();
4270 for (unsigned i = 0; i < num; i++)
4271 for (unsigned j = 0; j < num; j++)
4272 for (unsigned k = 0; k < num; k++)
4273 {
4274 auto_vec<tree> v (3);
4275 v.quick_push (csts[i]);
4276 v.quick_push (csts[j]);
4277 v.quick_push (csts[k]);
4278 v.qsort (tree_cmp);
4279 }
4280}
4281
757bf1df
DM
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Verify that evaluating "LHS OP RHS" on MODEL gives EXPECTED,
   reporting any failure at LOC.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  const svalue *lhs, tree_code op, const svalue *rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}
4293
/* Implementation detail of the ASSERT_CONDITION_* macros.
   As above, but with LHS and RHS given as trees, evaluated with a
   NULL context.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  tree lhs, tree_code op, tree rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}
4305
90f7c300
DM
/* Implementation detail of ASSERT_DUMP_TREE_EQ.
   Verify that dump_tree on T prints exactly EXPECTED, reporting any
   failure at LOC.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* Normalize quote characters for stable comparison.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
4317
/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
4324
757bf1df
DM
/* Implementation detail of ASSERT_DUMP_EQ.
   Verify that MODEL.dump_to_pp (with the given SUMMARIZE flag) prints
   exactly EXPECTED, reporting any failure at LOC.  */

static void
assert_dump_eq (const location &loc,
                const region_model &model,
                bool summarize,
                const char *expected)
{
  /* Normalize quote characters for stable comparison.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}
4340
/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT
4347
4348/* Smoketest for region_model::dump_to_pp. */
4349
4350static void
4351test_dump ()
4352{
808f4dfe
DM
4353 region_model_manager mgr;
4354 region_model model (&mgr);
757bf1df
DM
4355
4356 ASSERT_DUMP_EQ (model, false,
808f4dfe
DM
4357 "stack depth: 0\n"
4358 "m_called_unknown_fn: FALSE\n"
4359 "constraint_manager:\n"
4360 " equiv classes:\n"
4361 " constraints:\n");
4362 ASSERT_DUMP_EQ (model, true,
4363 "stack depth: 0\n"
4364 "m_called_unknown_fn: FALSE\n"
4365 "constraint_manager:\n"
757bf1df
DM
4366 " equiv classes:\n"
4367 " constraints:\n");
757bf1df
DM
4368}
4369
884d9141
DM
/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLs in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
                         const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  /* Chain the fields together, setting their context to the new type.
     chainon prepends here, so nreverse below restores source order.  */
  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  /* Compute the type's size and layout.  */
  layout_type (t);
  return t;
}
4398
a96f1c38
DM
/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    /* Build the two int fields "x" and "y", then the enclosing
       RECORD_TYPE.  */
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  tree m_x_field;
  tree m_y_field;
  tree m_coord_type;
};
4419
808f4dfe 4420/* Verify usage of a struct. */
884d9141
DM
4421
4422static void
808f4dfe 4423test_struct ()
884d9141 4424{
a96f1c38
DM
4425 coord_test ct;
4426
4427 tree c = build_global_decl ("c", ct.m_coord_type);
4428 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4429 c, ct.m_x_field, NULL_TREE);
4430 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4431 c, ct.m_y_field, NULL_TREE);
884d9141
DM
4432
4433 tree int_17 = build_int_cst (integer_type_node, 17);
4434 tree int_m3 = build_int_cst (integer_type_node, -3);
4435
808f4dfe
DM
4436 region_model_manager mgr;
4437 region_model model (&mgr);
884d9141
DM
4438 model.set_value (c_x, int_17, NULL);
4439 model.set_value (c_y, int_m3, NULL);
4440
808f4dfe
DM
4441 /* Verify get_offset for "c.x". */
4442 {
4443 const region *c_x_reg = model.get_lvalue (c_x, NULL);
4444 region_offset offset = c_x_reg->get_offset ();
4445 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4446 ASSERT_EQ (offset.get_bit_offset (), 0);
4447 }
4448
4449 /* Verify get_offset for "c.y". */
4450 {
4451 const region *c_y_reg = model.get_lvalue (c_y, NULL);
4452 region_offset offset = c_y_reg->get_offset ();
4453 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
4454 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
4455 }
884d9141
DM
4456}
4457
808f4dfe 4458/* Verify usage of an array element. */
884d9141
DM
4459
4460static void
808f4dfe 4461test_array_1 ()
884d9141
DM
4462{
4463 tree tlen = size_int (10);
4464 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
4465
4466 tree a = build_global_decl ("a", arr_type);
4467
808f4dfe
DM
4468 region_model_manager mgr;
4469 region_model model (&mgr);
884d9141
DM
4470 tree int_0 = build_int_cst (integer_type_node, 0);
4471 tree a_0 = build4 (ARRAY_REF, char_type_node,
4472 a, int_0, NULL_TREE, NULL_TREE);
4473 tree char_A = build_int_cst (char_type_node, 'A');
4474 model.set_value (a_0, char_A, NULL);
884d9141
DM
4475}
4476
90f7c300
DM
/* Verify that region_model::get_representative_tree works as expected,
   for string constants, string literals, array elements, and struct
   fields.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST: the representative of a string constant's value is the
     constant itself.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal: dumped as a pointer to its first character.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3]: storing a placeholder there should make "a[3]"
       its representative.  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
                         a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0]: likewise for index zero.  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
                         a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
4569
757bf1df 4570/* Verify that calling region_model::get_rvalue repeatedly on the same
808f4dfe 4571 tree constant retrieves the same svalue *. */
757bf1df
DM
4572
4573static void
4574test_unique_constants ()
4575{
4576 tree int_0 = build_int_cst (integer_type_node, 0);
4577 tree int_42 = build_int_cst (integer_type_node, 42);
4578
4579 test_region_model_context ctxt;
808f4dfe
DM
4580 region_model_manager mgr;
4581 region_model model (&mgr);
757bf1df
DM
4582 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
4583 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
4584 model.get_rvalue (int_42, &ctxt));
4585 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
4586 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
757bf1df 4587
808f4dfe
DM
4588 /* A "(const int)42" will be a different tree from "(int)42)"... */
4589 tree const_int_type_node
4590 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
4591 tree const_int_42 = build_int_cst (const_int_type_node, 42);
4592 ASSERT_NE (int_42, const_int_42);
4593 /* It should have a different const_svalue. */
4594 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
4595 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
4596 ASSERT_NE (int_42_sval, const_int_42_sval);
4597 /* But they should compare as equal. */
4598 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
4599 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
757bf1df
DM
4600}
4601
808f4dfe
DM
4602/* Verify that each type gets its own singleton unknown_svalue within a
4603 region_model_manager, and that NULL_TREE gets its own singleton. */
757bf1df
DM
4604
4605static void
808f4dfe 4606test_unique_unknowns ()
757bf1df 4607{
808f4dfe
DM
4608 region_model_manager mgr;
4609 const svalue *unknown_int
4610 = mgr.get_or_create_unknown_svalue (integer_type_node);
4611 /* Repeated calls with the same type should get the same "unknown"
4612 svalue. */
4613 const svalue *unknown_int_2
4614 = mgr.get_or_create_unknown_svalue (integer_type_node);
4615 ASSERT_EQ (unknown_int, unknown_int_2);
757bf1df 4616
808f4dfe
DM
4617 /* Different types (or the NULL type) should have different
4618 unknown_svalues. */
4619 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
4620 ASSERT_NE (unknown_NULL_type, unknown_int);
757bf1df 4621
808f4dfe
DM
4622 /* Repeated calls with NULL for the type should get the same "unknown"
4623 svalue. */
4624 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
4625 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
757bf1df
DM
4626}
4627
808f4dfe 4628/* Verify that initial_svalue are handled as expected. */
757bf1df 4629
808f4dfe
DM
4630static void
4631test_initial_svalue_folding ()
757bf1df 4632{
808f4dfe
DM
4633 region_model_manager mgr;
4634 tree x = build_global_decl ("x", integer_type_node);
4635 tree y = build_global_decl ("y", integer_type_node);
757bf1df 4636
808f4dfe
DM
4637 test_region_model_context ctxt;
4638 region_model model (&mgr);
4639 const svalue *x_init = model.get_rvalue (x, &ctxt);
4640 const svalue *y_init = model.get_rvalue (y, &ctxt);
4641 ASSERT_NE (x_init, y_init);
4642 const region *x_reg = model.get_lvalue (x, &ctxt);
4643 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
757bf1df 4644
808f4dfe 4645}
757bf1df 4646
808f4dfe 4647/* Verify that unary ops are folded as expected. */
757bf1df
DM
4648
4649static void
808f4dfe 4650test_unaryop_svalue_folding ()
757bf1df 4651{
808f4dfe 4652 region_model_manager mgr;
757bf1df
DM
4653 tree x = build_global_decl ("x", integer_type_node);
4654 tree y = build_global_decl ("y", integer_type_node);
4655
808f4dfe
DM
4656 test_region_model_context ctxt;
4657 region_model model (&mgr);
4658 const svalue *x_init = model.get_rvalue (x, &ctxt);
4659 const svalue *y_init = model.get_rvalue (y, &ctxt);
4660 const region *x_reg = model.get_lvalue (x, &ctxt);
4661 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
4662
4663 /* "(int)x" -> "x". */
4664 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
4665
4666 /* "(void *)x" -> something other than "x". */
4667 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
4668
4669 /* "!(x == y)" -> "x != y". */
4670 ASSERT_EQ (mgr.get_or_create_unaryop
4671 (boolean_type_node, TRUTH_NOT_EXPR,
4672 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
4673 x_init, y_init)),
4674 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
4675 x_init, y_init));
4676 /* "!(x > y)" -> "x <= y". */
4677 ASSERT_EQ (mgr.get_or_create_unaryop
4678 (boolean_type_node, TRUTH_NOT_EXPR,
4679 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
4680 x_init, y_init)),
4681 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
4682 x_init, y_init));
4683}
4684
4685/* Verify that binops on constant svalues are folded. */
757bf1df 4686
808f4dfe
DM
4687static void
4688test_binop_svalue_folding ()
4689{
4690#define NUM_CSTS 10
4691 tree cst_int[NUM_CSTS];
4692 region_model_manager mgr;
4693 const svalue *cst_sval[NUM_CSTS];
4694 for (int i = 0; i < NUM_CSTS; i++)
4695 {
4696 cst_int[i] = build_int_cst (integer_type_node, i);
4697 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
4698 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
4699 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
4700 }
757bf1df 4701
808f4dfe
DM
4702 for (int i = 0; i < NUM_CSTS; i++)
4703 for (int j = 0; j < NUM_CSTS; j++)
4704 {
4705 if (i != j)
4706 ASSERT_NE (cst_sval[i], cst_sval[j]);
4707 if (i + j < NUM_CSTS)
4708 {
4709 const svalue *sum
4710 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4711 cst_sval[i], cst_sval[j]);
4712 ASSERT_EQ (sum, cst_sval[i + j]);
4713 }
4714 if (i - j >= 0)
4715 {
4716 const svalue *difference
4717 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
4718 cst_sval[i], cst_sval[j]);
4719 ASSERT_EQ (difference, cst_sval[i - j]);
4720 }
4721 if (i * j < NUM_CSTS)
4722 {
4723 const svalue *product
4724 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4725 cst_sval[i], cst_sval[j]);
4726 ASSERT_EQ (product, cst_sval[i * j]);
4727 }
4728 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
4729 cst_sval[i], cst_sval[j]);
4730 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
4731 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
4732 cst_sval[i], cst_sval[j]);
4733 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
4734 // etc
4735 }
757bf1df 4736
808f4dfe 4737 tree x = build_global_decl ("x", integer_type_node);
757bf1df 4738
808f4dfe
DM
4739 test_region_model_context ctxt;
4740 region_model model (&mgr);
4741 const svalue *x_init = model.get_rvalue (x, &ctxt);
4742
4743 /* PLUS_EXPR folding. */
4744 const svalue *x_init_plus_zero
4745 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4746 x_init, cst_sval[0]);
4747 ASSERT_EQ (x_init_plus_zero, x_init);
4748 const svalue *zero_plus_x_init
4749 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4750 cst_sval[0], x_init);
4751 ASSERT_EQ (zero_plus_x_init, x_init);
4752
4753 /* MULT_EXPR folding. */
4754 const svalue *x_init_times_zero
4755 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4756 x_init, cst_sval[0]);
4757 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
4758 const svalue *zero_times_x_init
4759 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4760 cst_sval[0], x_init);
4761 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
4762
4763 const svalue *x_init_times_one
4764 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4765 x_init, cst_sval[1]);
4766 ASSERT_EQ (x_init_times_one, x_init);
4767 const svalue *one_times_x_init
4768 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4769 cst_sval[1], x_init);
4770 ASSERT_EQ (one_times_x_init, x_init);
4771
4772 // etc
4773 // TODO: do we want to use the match-and-simplify DSL for this?
4774
4775 /* Verify that binops put any constants on the RHS. */
4776 const svalue *four_times_x_init
4777 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4778 cst_sval[4], x_init);
4779 const svalue *x_init_times_four
4780 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
4781 x_init, cst_sval[4]);
4782 ASSERT_EQ (four_times_x_init, x_init_times_four);
4783 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
4784 ASSERT_EQ (binop->get_op (), MULT_EXPR);
4785 ASSERT_EQ (binop->get_arg0 (), x_init);
4786 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
4787
4788 /* Verify that ((x + 1) + 1) == (x + 2). */
4789 const svalue *x_init_plus_one
4790 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4791 x_init, cst_sval[1]);
4792 const svalue *x_init_plus_two
4793 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4794 x_init, cst_sval[2]);
4795 const svalue *x_init_plus_one_plus_one
4796 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
4797 x_init_plus_one, cst_sval[1]);
4798 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
4f34f8cc
DM
4799
4800 /* Verify various binops on booleans. */
4801 {
4802 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
4803 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
4804 const svalue *sval_unknown
4805 = mgr.get_or_create_unknown_svalue (boolean_type_node);
4806 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
4807 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
4808 {
4809 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4810 sval_true, sval_unknown),
4811 sval_true);
4812 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4813 sval_false, sval_unknown),
4814 sval_unknown);
4815 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4816 sval_false, &sval_placeholder),
4817 &sval_placeholder);
4818 }
4819 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
4820 {
4821 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4822 sval_false, sval_unknown),
4823 sval_false);
4824 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4825 sval_true, sval_unknown),
4826 sval_unknown);
4827 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
4828 sval_true, &sval_placeholder),
4829 &sval_placeholder);
4830 }
4831 }
808f4dfe
DM
4832}
4833
4834/* Verify that sub_svalues are folded as expected. */
757bf1df 4835
808f4dfe
DM
4836static void
4837test_sub_svalue_folding ()
4838{
4839 coord_test ct;
4840 tree c = build_global_decl ("c", ct.m_coord_type);
4841 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4842 c, ct.m_x_field, NULL_TREE);
757bf1df 4843
808f4dfe
DM
4844 region_model_manager mgr;
4845 region_model model (&mgr);
4846 test_region_model_context ctxt;
4847 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 4848
808f4dfe
DM
4849 /* Verify that sub_svalue of "unknown" simply
4850 yields an unknown. */
757bf1df 4851
808f4dfe
DM
4852 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
4853 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
4854 unknown, c_x_reg);
4855 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
4856 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
757bf1df
DM
4857}
4858
808f4dfe 4859/* Test that region::descendent_of_p works as expected. */
757bf1df
DM
4860
4861static void
808f4dfe 4862test_descendent_of_p ()
757bf1df 4863{
808f4dfe
DM
4864 region_model_manager mgr;
4865 const region *stack = mgr.get_stack_region ();
4866 const region *heap = mgr.get_heap_region ();
4867 const region *code = mgr.get_code_region ();
4868 const region *globals = mgr.get_globals_region ();
757bf1df 4869
808f4dfe
DM
4870 /* descendent_of_p should return true when used on the region itself. */
4871 ASSERT_TRUE (stack->descendent_of_p (stack));
4872 ASSERT_FALSE (stack->descendent_of_p (heap));
4873 ASSERT_FALSE (stack->descendent_of_p (code));
4874 ASSERT_FALSE (stack->descendent_of_p (globals));
757bf1df 4875
808f4dfe
DM
4876 tree x = build_global_decl ("x", integer_type_node);
4877 const region *x_reg = mgr.get_region_for_global (x);
4878 ASSERT_TRUE (x_reg->descendent_of_p (globals));
757bf1df 4879
808f4dfe
DM
4880 /* A cast_region should be a descendent of the original region. */
4881 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
4882 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
757bf1df
DM
4883}
4884
391512ad
DM
4885/* Verify that bit_range_region works as expected. */
4886
4887static void
4888test_bit_range_regions ()
4889{
4890 tree x = build_global_decl ("x", integer_type_node);
4891 region_model_manager mgr;
4892 const region *x_reg = mgr.get_region_for_global (x);
4893 const region *byte0
4894 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
4895 const region *byte1
4896 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
4897 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
4898 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
4899 ASSERT_NE (byte0, byte1);
4900}
4901
757bf1df
DM
4902/* Verify that simple assignments work as expected. */
4903
4904static void
4905test_assignment ()
4906{
4907 tree int_0 = build_int_cst (integer_type_node, 0);
4908 tree x = build_global_decl ("x", integer_type_node);
4909 tree y = build_global_decl ("y", integer_type_node);
4910
4911 /* "x == 0", then use of y, then "y = 0;". */
808f4dfe
DM
4912 region_model_manager mgr;
4913 region_model model (&mgr);
757bf1df
DM
4914 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
4915 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
4916 model.set_value (model.get_lvalue (y, NULL),
4917 model.get_rvalue (int_0, NULL),
4918 NULL);
4919 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
4920 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
757bf1df
DM
4921}
4922
a96f1c38
DM
4923/* Verify that compound assignments work as expected. */
4924
4925static void
4926test_compound_assignment ()
4927{
4928 coord_test ct;
4929
4930 tree c = build_global_decl ("c", ct.m_coord_type);
4931 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4932 c, ct.m_x_field, NULL_TREE);
4933 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4934 c, ct.m_y_field, NULL_TREE);
4935 tree d = build_global_decl ("d", ct.m_coord_type);
4936 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
4937 d, ct.m_x_field, NULL_TREE);
4938 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
4939 d, ct.m_y_field, NULL_TREE);
4940
4941 tree int_17 = build_int_cst (integer_type_node, 17);
4942 tree int_m3 = build_int_cst (integer_type_node, -3);
4943
808f4dfe
DM
4944 region_model_manager mgr;
4945 region_model model (&mgr);
a96f1c38
DM
4946 model.set_value (c_x, int_17, NULL);
4947 model.set_value (c_y, int_m3, NULL);
4948
a96f1c38 4949 /* Copy c to d. */
13ad6d9f
DM
4950 const svalue *sval = model.get_rvalue (c, NULL);
4951 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
4952
a96f1c38
DM
4953 /* Check that the fields have the same svalues. */
4954 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
4955 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
4956}
4957
757bf1df
DM
4958/* Verify the details of pushing and popping stack frames. */
4959
4960static void
4961test_stack_frames ()
4962{
4963 tree int_42 = build_int_cst (integer_type_node, 42);
4964 tree int_10 = build_int_cst (integer_type_node, 10);
4965 tree int_5 = build_int_cst (integer_type_node, 5);
4966 tree int_0 = build_int_cst (integer_type_node, 0);
4967
4968 auto_vec <tree> param_types;
4969 tree parent_fndecl = make_fndecl (integer_type_node,
4970 "parent_fn",
4971 param_types);
4972 allocate_struct_function (parent_fndecl, true);
4973
4974 tree child_fndecl = make_fndecl (integer_type_node,
4975 "child_fn",
4976 param_types);
4977 allocate_struct_function (child_fndecl, true);
4978
4979 /* "a" and "b" in the parent frame. */
4980 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4981 get_identifier ("a"),
4982 integer_type_node);
4983 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4984 get_identifier ("b"),
4985 integer_type_node);
4986 /* "x" and "y" in a child frame. */
4987 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4988 get_identifier ("x"),
4989 integer_type_node);
4990 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
4991 get_identifier ("y"),
4992 integer_type_node);
4993
4994 /* "p" global. */
4995 tree p = build_global_decl ("p", ptr_type_node);
4996
4997 /* "q" global. */
4998 tree q = build_global_decl ("q", ptr_type_node);
4999
808f4dfe 5000 region_model_manager mgr;
757bf1df 5001 test_region_model_context ctxt;
808f4dfe 5002 region_model model (&mgr);
757bf1df
DM
5003
5004 /* Push stack frame for "parent_fn". */
808f4dfe
DM
5005 const region *parent_frame_reg
5006 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
5007 NULL, &ctxt);
5008 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
5009 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
5010 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
5011 model.set_value (a_in_parent_reg,
5012 model.get_rvalue (int_42, &ctxt),
5013 &ctxt);
5014 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
5015
757bf1df
DM
5016 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
5017 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
5018 tristate (tristate::TS_TRUE));
5019
5020 /* Push stack frame for "child_fn". */
808f4dfe 5021 const region *child_frame_reg
757bf1df 5022 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
5023 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
5024 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
5025 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
5026 model.set_value (x_in_child_reg,
5027 model.get_rvalue (int_0, &ctxt),
5028 &ctxt);
5029 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
5030
757bf1df
DM
5031 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
5032 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
5033 tristate (tristate::TS_TRUE));
5034
5035 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
5036 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
5037 model.set_value (p_in_globals_reg,
5038 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 5039 &ctxt);
808f4dfe 5040 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
5041
5042 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
5043 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
5044 model.set_value (q_in_globals_reg,
5045 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
5046 &ctxt);
5047
808f4dfe
DM
5048 /* Test region::descendent_of_p. */
5049 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
5050 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
5051 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
5052
5053 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
5054 model.pop_frame (NULL, NULL, &ctxt);
5055 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
5056 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
5057
5058 /* Verify that p (which was pointing at the local "x" in the popped
5059 frame) has been poisoned. */
33255ad3 5060 const svalue *new_p_sval = model.get_rvalue (p, NULL);
757bf1df
DM
5061 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
5062 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
5063 POISON_KIND_POPPED_STACK);
5064
5065 /* Verify that q still points to p, in spite of the region
5066 renumbering. */
808f4dfe 5067 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 5068 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 5069 ASSERT_EQ (new_q_sval->maybe_get_region (),
757bf1df
DM
5070 model.get_lvalue (p, &ctxt));
5071
5072 /* Verify that top of stack has been updated. */
808f4dfe 5073 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
5074
5075 /* Verify locals in parent frame. */
5076 /* Verify "a" still has its value. */
808f4dfe 5077 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
5078 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
5079 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
5080 int_42);
5081 /* Verify "b" still has its constraint. */
5082 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
5083 tristate (tristate::TS_TRUE));
5084}
5085
5086/* Verify that get_representative_path_var works as expected, that
808f4dfe 5087 we can map from regions to parms and back within a recursive call
757bf1df
DM
5088 stack. */
5089
5090static void
5091test_get_representative_path_var ()
5092{
5093 auto_vec <tree> param_types;
5094 tree fndecl = make_fndecl (integer_type_node,
5095 "factorial",
5096 param_types);
5097 allocate_struct_function (fndecl, true);
5098
5099 /* Parm "n". */
5100 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5101 get_identifier ("n"),
5102 integer_type_node);
5103
808f4dfe
DM
5104 region_model_manager mgr;
5105 test_region_model_context ctxt;
5106 region_model model (&mgr);
757bf1df
DM
5107
5108 /* Push 5 stack frames for "factorial", each with a param */
808f4dfe
DM
5109 auto_vec<const region *> parm_regs;
5110 auto_vec<const svalue *> parm_svals;
757bf1df
DM
5111 for (int depth = 0; depth < 5; depth++)
5112 {
808f4dfe
DM
5113 const region *frame_n_reg
5114 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
5115 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
5116 parm_regs.safe_push (parm_n_reg);
757bf1df 5117
808f4dfe
DM
5118 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
5119 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
5120 parm_svals.safe_push (sval_n);
757bf1df
DM
5121 }
5122
5123 /* Verify that we can recognize that the regions are the parms,
5124 at every depth. */
5125 for (int depth = 0; depth < 5; depth++)
5126 {
808f4dfe
DM
5127 {
5128 svalue_set visited;
5129 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
5130 &visited),
5131 path_var (n, depth + 1));
5132 }
757bf1df
DM
5133 /* ...and that we can lookup lvalues for locals for all frames,
5134 not just the top. */
5135 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
808f4dfe 5136 parm_regs[depth]);
757bf1df 5137 /* ...and that we can locate the svalues. */
808f4dfe
DM
5138 {
5139 svalue_set visited;
5140 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
5141 &visited),
5142 path_var (n, depth + 1));
5143 }
757bf1df
DM
5144 }
5145}
5146
808f4dfe 5147/* Ensure that region_model::operator== works as expected. */
757bf1df
DM
5148
5149static void
808f4dfe 5150test_equality_1 ()
757bf1df 5151{
808f4dfe
DM
5152 tree int_42 = build_int_cst (integer_type_node, 42);
5153 tree int_17 = build_int_cst (integer_type_node, 17);
757bf1df 5154
808f4dfe
DM
5155/* Verify that "empty" region_model instances are equal to each other. */
5156 region_model_manager mgr;
5157 region_model model0 (&mgr);
5158 region_model model1 (&mgr);
757bf1df 5159 ASSERT_EQ (model0, model1);
808f4dfe
DM
5160
5161 /* Verify that setting state in model1 makes the models non-equal. */
5162 tree x = build_global_decl ("x", integer_type_node);
5163 model0.set_value (x, int_42, NULL);
5164 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
5165 ASSERT_NE (model0, model1);
5166
5167 /* Verify the copy-ctor. */
5168 region_model model2 (model0);
5169 ASSERT_EQ (model0, model2);
5170 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
5171 ASSERT_NE (model1, model2);
5172
5173 /* Verify that models obtained from copy-ctor are independently editable
5174 w/o affecting the original model. */
5175 model2.set_value (x, int_17, NULL);
5176 ASSERT_NE (model0, model2);
5177 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
5178 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
757bf1df
DM
5179}
5180
5181/* Verify that region models for
5182 x = 42; y = 113;
5183 and
5184 y = 113; x = 42;
808f4dfe 5185 are equal. */
757bf1df
DM
5186
5187static void
5188test_canonicalization_2 ()
5189{
5190 tree int_42 = build_int_cst (integer_type_node, 42);
5191 tree int_113 = build_int_cst (integer_type_node, 113);
5192 tree x = build_global_decl ("x", integer_type_node);
5193 tree y = build_global_decl ("y", integer_type_node);
5194
808f4dfe
DM
5195 region_model_manager mgr;
5196 region_model model0 (&mgr);
757bf1df
DM
5197 model0.set_value (model0.get_lvalue (x, NULL),
5198 model0.get_rvalue (int_42, NULL),
5199 NULL);
5200 model0.set_value (model0.get_lvalue (y, NULL),
5201 model0.get_rvalue (int_113, NULL),
5202 NULL);
5203
808f4dfe 5204 region_model model1 (&mgr);
757bf1df
DM
5205 model1.set_value (model1.get_lvalue (y, NULL),
5206 model1.get_rvalue (int_113, NULL),
5207 NULL);
5208 model1.set_value (model1.get_lvalue (x, NULL),
5209 model1.get_rvalue (int_42, NULL),
5210 NULL);
5211
757bf1df
DM
5212 ASSERT_EQ (model0, model1);
5213}
5214
5215/* Verify that constraints for
5216 x > 3 && y > 42
5217 and
5218 y > 42 && x > 3
5219 are equal after canonicalization. */
5220
5221static void
5222test_canonicalization_3 ()
5223{
5224 tree int_3 = build_int_cst (integer_type_node, 3);
5225 tree int_42 = build_int_cst (integer_type_node, 42);
5226 tree x = build_global_decl ("x", integer_type_node);
5227 tree y = build_global_decl ("y", integer_type_node);
5228
808f4dfe
DM
5229 region_model_manager mgr;
5230 region_model model0 (&mgr);
757bf1df
DM
5231 model0.add_constraint (x, GT_EXPR, int_3, NULL);
5232 model0.add_constraint (y, GT_EXPR, int_42, NULL);
5233
808f4dfe 5234 region_model model1 (&mgr);
757bf1df
DM
5235 model1.add_constraint (y, GT_EXPR, int_42, NULL);
5236 model1.add_constraint (x, GT_EXPR, int_3, NULL);
5237
808f4dfe
DM
5238 model0.canonicalize ();
5239 model1.canonicalize ();
757bf1df
DM
5240 ASSERT_EQ (model0, model1);
5241}
5242
8c08c983
DM
5243/* Verify that we can canonicalize a model containing NaN and other real
5244 constants. */
5245
5246static void
5247test_canonicalization_4 ()
5248{
5249 auto_vec<tree> csts;
5250 append_interesting_constants (&csts);
5251
808f4dfe
DM
5252 region_model_manager mgr;
5253 region_model model (&mgr);
8c08c983 5254
3f207ab3 5255 for (tree cst : csts)
8c08c983
DM
5256 model.get_rvalue (cst, NULL);
5257
808f4dfe 5258 model.canonicalize ();
8c08c983
DM
5259}
5260
757bf1df
DM
5261/* Assert that if we have two region_model instances
5262 with values VAL_A and VAL_B for EXPR that they are
5263 mergable. Write the merged model to *OUT_MERGED_MODEL,
5264 and the merged svalue ptr to *OUT_MERGED_SVALUE.
5265 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
5266 for that region_model. */
5267
5268static void
5269assert_region_models_merge (tree expr, tree val_a, tree val_b,
808f4dfe
DM
5270 region_model *out_merged_model,
5271 const svalue **out_merged_svalue)
757bf1df 5272{
808f4dfe 5273 program_point point (program_point::origin ());
757bf1df 5274 test_region_model_context ctxt;
808f4dfe
DM
5275 region_model_manager *mgr = out_merged_model->get_manager ();
5276 region_model model0 (mgr);
5277 region_model model1 (mgr);
757bf1df
DM
5278 if (val_a)
5279 model0.set_value (model0.get_lvalue (expr, &ctxt),
5280 model0.get_rvalue (val_a, &ctxt),
5281 &ctxt);
5282 if (val_b)
5283 model1.set_value (model1.get_lvalue (expr, &ctxt),
5284 model1.get_rvalue (val_b, &ctxt),
5285 &ctxt);
5286
5287 /* They should be mergeable. */
808f4dfe
DM
5288 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
5289 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
757bf1df
DM
5290}
5291
5292/* Verify that we can merge region_model instances. */
5293
5294static void
5295test_state_merging ()
5296{
5297 tree int_42 = build_int_cst (integer_type_node, 42);
5298 tree int_113 = build_int_cst (integer_type_node, 113);
5299 tree x = build_global_decl ("x", integer_type_node);
5300 tree y = build_global_decl ("y", integer_type_node);
5301 tree z = build_global_decl ("z", integer_type_node);
5302 tree p = build_global_decl ("p", ptr_type_node);
5303
5304 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
5305 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
5306
5307 auto_vec <tree> param_types;
5308 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
5309 allocate_struct_function (test_fndecl, true);
5310
5311 /* Param "a". */
5312 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5313 get_identifier ("a"),
5314 integer_type_node);
5315 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
5316
455f58ec
DM
5317 /* Param "q", a pointer. */
5318 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
5319 get_identifier ("q"),
5320 ptr_type_node);
5321
808f4dfe
DM
5322 program_point point (program_point::origin ());
5323 region_model_manager mgr;
5324
757bf1df 5325 {
808f4dfe
DM
5326 region_model model0 (&mgr);
5327 region_model model1 (&mgr);
5328 region_model merged (&mgr);
757bf1df 5329 /* Verify empty models can be merged. */
808f4dfe 5330 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5331 ASSERT_EQ (model0, merged);
5332 }
5333
5334 /* Verify that we can merge two contradictory constraints on the
5335 value for a global. */
5336 /* TODO: verify that the merged model doesn't have a value for
5337 the global */
5338 {
808f4dfe
DM
5339 region_model model0 (&mgr);
5340 region_model model1 (&mgr);
5341 region_model merged (&mgr);
757bf1df
DM
5342 test_region_model_context ctxt;
5343 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5344 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe 5345 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5346 ASSERT_NE (model0, merged);
5347 ASSERT_NE (model1, merged);
5348 }
5349
5350 /* Verify handling of a PARM_DECL. */
5351 {
5352 test_region_model_context ctxt;
808f4dfe
DM
5353 region_model model0 (&mgr);
5354 region_model model1 (&mgr);
757bf1df
DM
5355 ASSERT_EQ (model0.get_stack_depth (), 0);
5356 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5357 ASSERT_EQ (model0.get_stack_depth (), 1);
757bf1df
DM
5358 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
5359
808f4dfe
DM
5360 placeholder_svalue test_sval (integer_type_node, "test sval");
5361 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
5362 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
757bf1df
DM
5363 ASSERT_EQ (model0, model1);
5364
757bf1df 5365 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5366 region_model merged (&mgr);
5367 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5368 ASSERT_EQ (model0, merged);
808f4dfe
DM
5369 /* In particular, "a" should have the placeholder value. */
5370 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
757bf1df
DM
5371 }
5372
5373 /* Verify handling of a global. */
5374 {
5375 test_region_model_context ctxt;
808f4dfe
DM
5376 region_model model0 (&mgr);
5377 region_model model1 (&mgr);
757bf1df 5378
808f4dfe
DM
5379 placeholder_svalue test_sval (integer_type_node, "test sval");
5380 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5381 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
5382 ASSERT_EQ (model0, model1);
757bf1df
DM
5383
5384 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5385 region_model merged (&mgr);
5386 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5387 ASSERT_EQ (model0, merged);
808f4dfe
DM
5388 /* In particular, "x" should have the placeholder value. */
5389 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
757bf1df
DM
5390 }
5391
5392 /* Use global-handling to verify various combinations of values. */
5393
5394 /* Two equal constant values. */
5395 {
808f4dfe
DM
5396 region_model merged (&mgr);
5397 const svalue *merged_x_sval;
757bf1df
DM
5398 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
5399
5400 /* In particular, there should be a constant value for "x". */
5401 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
5402 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
5403 int_42);
5404 }
5405
5406 /* Two non-equal constant values. */
5407 {
808f4dfe
DM
5408 region_model merged (&mgr);
5409 const svalue *merged_x_sval;
757bf1df
DM
5410 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
5411
808f4dfe
DM
5412 /* In particular, there should be a "widening" value for "x". */
5413 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
757bf1df
DM
5414 }
5415
808f4dfe 5416 /* Initial and constant. */
757bf1df 5417 {
808f4dfe
DM
5418 region_model merged (&mgr);
5419 const svalue *merged_x_sval;
757bf1df
DM
5420 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
5421
5422 /* In particular, there should be an unknown value for "x". */
5423 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5424 }
5425
808f4dfe 5426 /* Constant and initial. */
757bf1df 5427 {
808f4dfe
DM
5428 region_model merged (&mgr);
5429 const svalue *merged_x_sval;
757bf1df
DM
5430 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
5431
5432 /* In particular, there should be an unknown value for "x". */
5433 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5434 }
5435
5436 /* Unknown and constant. */
5437 // TODO
5438
5439 /* Pointers: NULL and NULL. */
5440 // TODO
5441
5442 /* Pointers: NULL and non-NULL. */
5443 // TODO
5444
5445 /* Pointers: non-NULL and non-NULL: ptr to a local. */
5446 {
808f4dfe 5447 region_model model0 (&mgr);
757bf1df 5448 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
757bf1df
DM
5449 model0.set_value (model0.get_lvalue (p, NULL),
5450 model0.get_rvalue (addr_of_a, NULL), NULL);
5451
5452 region_model model1 (model0);
5453 ASSERT_EQ (model0, model1);
5454
5455 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5456 region_model merged (&mgr);
5457 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5458 ASSERT_EQ (model0, merged);
5459 }
5460
5461 /* Pointers: non-NULL and non-NULL: ptr to a global. */
5462 {
808f4dfe 5463 region_model merged (&mgr);
757bf1df 5464 /* p == &y in both input models. */
808f4dfe 5465 const svalue *merged_p_sval;
757bf1df
DM
5466 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
5467 &merged_p_sval);
5468
5469 /* We should get p == &y in the merged model. */
5470 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
808f4dfe
DM
5471 const region_svalue *merged_p_ptr
5472 = merged_p_sval->dyn_cast_region_svalue ();
5473 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
5474 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
757bf1df
DM
5475 }
5476
5477 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
5478 {
808f4dfe
DM
5479 region_model merged (&mgr);
5480 /* x == &y vs x == &z in the input models; these are actually casts
5481 of the ptrs to "int". */
5482 const svalue *merged_x_sval;
5483 // TODO:
757bf1df
DM
5484 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
5485 &merged_x_sval);
5486
5487 /* We should get x == unknown in the merged model. */
5488 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
5489 }
5490
5491 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
5492 {
5493 test_region_model_context ctxt;
808f4dfe 5494 region_model model0 (&mgr);
9a2c9579 5495 tree size = build_int_cst (size_type_node, 1024);
808f4dfe 5496 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
b9365b93
DM
5497 const region *new_reg
5498 = model0.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe 5499 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
757bf1df 5500 model0.set_value (model0.get_lvalue (p, &ctxt),
808f4dfe 5501 ptr_sval, &ctxt);
757bf1df
DM
5502
5503 region_model model1 (model0);
5504
5505 ASSERT_EQ (model0, model1);
5506
808f4dfe
DM
5507 region_model merged (&mgr);
5508 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5509
808f4dfe 5510 /* The merged model ought to be identical. */
757bf1df
DM
5511 ASSERT_EQ (model0, merged);
5512 }
5513
808f4dfe
DM
5514 /* Two regions sharing the same placeholder svalue should continue sharing
5515 it after self-merger. */
757bf1df
DM
5516 {
5517 test_region_model_context ctxt;
808f4dfe
DM
5518 region_model model0 (&mgr);
5519 placeholder_svalue placeholder_sval (integer_type_node, "test");
5520 model0.set_value (model0.get_lvalue (x, &ctxt),
5521 &placeholder_sval, &ctxt);
5522 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
757bf1df
DM
5523 region_model model1 (model0);
5524
5525 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5526 region_model merged (&mgr);
5527 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5528 ASSERT_EQ (model0, merged);
5529
5530 /* In particular, we should have x == y. */
5531 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
5532 tristate (tristate::TS_TRUE));
5533 }
5534
757bf1df 5535 {
808f4dfe
DM
5536 region_model model0 (&mgr);
5537 region_model model1 (&mgr);
757bf1df
DM
5538 test_region_model_context ctxt;
5539 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5540 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
808f4dfe
DM
5541 region_model merged (&mgr);
5542 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5543 }
5544
5545 {
808f4dfe
DM
5546 region_model model0 (&mgr);
5547 region_model model1 (&mgr);
757bf1df
DM
5548 test_region_model_context ctxt;
5549 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
5550 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
5551 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe
DM
5552 region_model merged (&mgr);
5553 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5554 }
757bf1df
DM
5555
5556 // TODO: what can't we merge? need at least one such test
5557
5558 /* TODO: various things
5559 - heap regions
5560 - value merging:
5561 - every combination, but in particular
808f4dfe 5562 - pairs of regions
757bf1df
DM
5563 */
5564
5565 /* Views. */
5566 {
5567 test_region_model_context ctxt;
808f4dfe 5568 region_model model0 (&mgr);
757bf1df 5569
808f4dfe
DM
5570 const region *x_reg = model0.get_lvalue (x, &ctxt);
5571 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
757bf1df
DM
5572 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
5573
5574 region_model model1 (model0);
5575 ASSERT_EQ (model1, model0);
5576
5577 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
5578 region_model merged (&mgr);
5579 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 5580 }
455f58ec
DM
5581
5582 /* Verify that we can merge a model in which a local in an older stack
5583 frame points to a local in a more recent stack frame. */
5584 {
808f4dfe 5585 region_model model0 (&mgr);
455f58ec 5586 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
808f4dfe 5587 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
455f58ec
DM
5588
5589 /* Push a second frame. */
808f4dfe 5590 const region *reg_2nd_frame
455f58ec
DM
5591 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5592
5593 /* Have a pointer in the older frame point to a local in the
5594 more recent frame. */
808f4dfe
DM
5595 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
5596 model0.set_value (q_in_first_frame, sval_ptr, NULL);
455f58ec
DM
5597
5598 /* Verify that it's pointing at the newer frame. */
5932dd35 5599 const region *reg_pointee = sval_ptr->maybe_get_region ();
808f4dfe 5600 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
455f58ec 5601
808f4dfe 5602 model0.canonicalize ();
455f58ec
DM
5603
5604 region_model model1 (model0);
5605 ASSERT_EQ (model0, model1);
5606
5607 /* They should be mergeable, and the result should be the same
5608 (after canonicalization, at least). */
808f4dfe
DM
5609 region_model merged (&mgr);
5610 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
5611 merged.canonicalize ();
455f58ec
DM
5612 ASSERT_EQ (model0, merged);
5613 }
5614
5615 /* Verify that we can merge a model in which a local points to a global. */
5616 {
808f4dfe 5617 region_model model0 (&mgr);
455f58ec
DM
5618 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
5619 model0.set_value (model0.get_lvalue (q, NULL),
5620 model0.get_rvalue (addr_of_y, NULL), NULL);
5621
455f58ec
DM
5622 region_model model1 (model0);
5623 ASSERT_EQ (model0, model1);
5624
5625 /* They should be mergeable, and the result should be the same
5626 (after canonicalization, at least). */
808f4dfe
DM
5627 region_model merged (&mgr);
5628 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
455f58ec
DM
5629 ASSERT_EQ (model0, merged);
5630 }
757bf1df
DM
5631}
5632
5633/* Verify that constraints are correctly merged when merging region_model
5634 instances. */
5635
5636static void
5637test_constraint_merging ()
5638{
5639 tree int_0 = build_int_cst (integer_type_node, 0);
5640 tree int_5 = build_int_cst (integer_type_node, 5);
5641 tree x = build_global_decl ("x", integer_type_node);
5642 tree y = build_global_decl ("y", integer_type_node);
5643 tree z = build_global_decl ("z", integer_type_node);
5644 tree n = build_global_decl ("n", integer_type_node);
5645
808f4dfe 5646 region_model_manager mgr;
757bf1df
DM
5647 test_region_model_context ctxt;
5648
5649 /* model0: 0 <= (x == y) < n. */
808f4dfe 5650 region_model model0 (&mgr);
757bf1df
DM
5651 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
5652 model0.add_constraint (x, GE_EXPR, int_0, NULL);
5653 model0.add_constraint (x, LT_EXPR, n, NULL);
5654
5655 /* model1: z != 5 && (0 <= x < n). */
808f4dfe 5656 region_model model1 (&mgr);
757bf1df
DM
5657 model1.add_constraint (z, NE_EXPR, int_5, NULL);
5658 model1.add_constraint (x, GE_EXPR, int_0, NULL);
5659 model1.add_constraint (x, LT_EXPR, n, NULL);
5660
5661 /* They should be mergeable; the merged constraints should
5662 be: (0 <= x < n). */
808f4dfe
DM
5663 program_point point (program_point::origin ());
5664 region_model merged (&mgr);
5665 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
5666
5667 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
5668 tristate (tristate::TS_TRUE));
5669 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
5670 tristate (tristate::TS_TRUE));
5671
5672 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
5673 tristate (tristate::TS_UNKNOWN));
5674 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
5675 tristate (tristate::TS_UNKNOWN));
5676}
5677
808f4dfe
DM
5678/* Verify that widening_svalue::eval_condition_without_cm works as
5679 expected. */
5680
5681static void
5682test_widening_constraints ()
5683{
5684 program_point point (program_point::origin ());
5685 tree int_0 = build_int_cst (integer_type_node, 0);
5686 tree int_m1 = build_int_cst (integer_type_node, -1);
5687 tree int_1 = build_int_cst (integer_type_node, 1);
5688 tree int_256 = build_int_cst (integer_type_node, 256);
5689 region_model_manager mgr;
5690 test_region_model_context ctxt;
5691 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
5692 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
5693 const svalue *w_zero_then_one_sval
5694 = mgr.get_or_create_widening_svalue (integer_type_node, point,
5695 int_0_sval, int_1_sval);
5696 const widening_svalue *w_zero_then_one
5697 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
5698 ASSERT_EQ (w_zero_then_one->get_direction (),
5699 widening_svalue::DIR_ASCENDING);
5700 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
5701 tristate::TS_FALSE);
5702 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
5703 tristate::TS_FALSE);
5704 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
5705 tristate::TS_UNKNOWN);
5706 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
5707 tristate::TS_UNKNOWN);
5708
5709 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
5710 tristate::TS_FALSE);
5711 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
5712 tristate::TS_UNKNOWN);
5713 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
5714 tristate::TS_UNKNOWN);
5715 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
5716 tristate::TS_UNKNOWN);
5717
5718 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
5719 tristate::TS_TRUE);
5720 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
5721 tristate::TS_UNKNOWN);
5722 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
5723 tristate::TS_UNKNOWN);
5724 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
5725 tristate::TS_UNKNOWN);
5726
5727 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
5728 tristate::TS_TRUE);
5729 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
5730 tristate::TS_TRUE);
5731 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
5732 tristate::TS_UNKNOWN);
5733 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
5734 tristate::TS_UNKNOWN);
5735
5736 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
5737 tristate::TS_FALSE);
5738 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
5739 tristate::TS_UNKNOWN);
5740 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
5741 tristate::TS_UNKNOWN);
5742 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
5743 tristate::TS_UNKNOWN);
5744
5745 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
5746 tristate::TS_TRUE);
5747 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
5748 tristate::TS_UNKNOWN);
5749 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
5750 tristate::TS_UNKNOWN);
5751 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
5752 tristate::TS_UNKNOWN);
5753}
5754
5755/* Verify merging constraints for states simulating successive iterations
5756 of a loop.
5757 Simulate:
5758 for (i = 0; i < 256; i++)
5759 [...body...]
5760 i.e. this gimple:.
5761 i_15 = 0;
5762 goto <bb 4>;
5763
5764 <bb 4> :
5765 i_11 = PHI <i_15(2), i_23(3)>
5766 if (i_11 <= 255)
5767 goto <bb 3>;
5768 else
5769 goto [AFTER LOOP]
5770
5771 <bb 3> :
5772 [LOOP BODY]
5773 i_23 = i_11 + 1;
5774
5775 and thus these ops (and resultant states):
5776 i_11 = PHI()
5777 {i_11: 0}
5778 add_constraint (i_11 <= 255) [for the true edge]
5779 {i_11: 0} [constraint was a no-op]
5780 i_23 = i_11 + 1;
5781 {i_22: 1}
5782 i_11 = PHI()
5783 {i_11: WIDENED (at phi, 0, 1)}
5784 add_constraint (i_11 <= 255) [for the true edge]
5785 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
5786 i_23 = i_11 + 1;
5787 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
5788 i_11 = PHI(); merge with state at phi above
5789 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
5790 [changing meaning of "WIDENED" here]
5791 if (i_11 <= 255)
5792 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
5793 F: {i_11: 256}
5794 */
5795
5796static void
5797test_iteration_1 ()
5798{
5799 program_point point (program_point::origin ());
5800
5801 tree int_0 = build_int_cst (integer_type_node, 0);
5802 tree int_1 = build_int_cst (integer_type_node, 1);
5803 tree int_256 = build_int_cst (integer_type_node, 256);
5804 tree int_257 = build_int_cst (integer_type_node, 257);
5805 tree i = build_global_decl ("i", integer_type_node);
5806
5807 region_model_manager mgr;
5808 test_region_model_context ctxt;
5809
5810 /* model0: i: 0. */
5811 region_model model0 (&mgr);
5812 model0.set_value (i, int_0, &ctxt);
5813
5814 /* model1: i: 1. */
5815 region_model model1 (&mgr);
5816 model1.set_value (i, int_1, &ctxt);
5817
5818 /* Should merge "i" to a widened value. */
5819 region_model model2 (&mgr);
5820 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
5821 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
5822 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
5823 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
5824 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
5825
5826 /* Add constraint: i < 256 */
5827 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
5828 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
5829 tristate (tristate::TS_TRUE));
5830 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
5831 tristate (tristate::TS_TRUE));
5832
5833 /* Try merging with the initial state. */
5834 region_model model3 (&mgr);
5835 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
5836 /* Merging the merged value with the initial value should be idempotent,
5837 so that the analysis converges. */
5838 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
5839 /* Merger of 0 and a widening value with constraint < CST
5840 should retain the constraint, even though it was implicit
5841 for the 0 case. */
5842 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
5843 tristate (tristate::TS_TRUE));
5844 /* ...and we should have equality: the analysis should have converged. */
5845 ASSERT_EQ (model3, model2);
5846
5847 /* "i_23 = i_11 + 1;" */
5848 region_model model4 (model3);
5849 ASSERT_EQ (model4, model2);
5850 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
5851 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
5852 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
5853
5854 /* Try merging with the "i: 1" state. */
5855 region_model model5 (&mgr);
5856 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
5857 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
5858 ASSERT_EQ (model5, model4);
5859
5860 /* "i_11 = PHI();" merge with state at phi above.
5861 For i, we should have a merger of WIDENING with WIDENING + 1,
5862 and this should be WIDENING again. */
5863 region_model model6 (&mgr);
5864 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
5865 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
5866 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
5867
5868 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
5869}
5870
6969ac30
DM
5871/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
5872 all cast pointers to that region are also known to be non-NULL. */
5873
5874static void
5875test_malloc_constraints ()
5876{
808f4dfe
DM
5877 region_model_manager mgr;
5878 region_model model (&mgr);
6969ac30
DM
5879 tree p = build_global_decl ("p", ptr_type_node);
5880 tree char_star = build_pointer_type (char_type_node);
5881 tree q = build_global_decl ("q", char_star);
5882 tree null_ptr = build_int_cst (ptr_type_node, 0);
5883
808f4dfe 5884 const svalue *size_in_bytes
9a2c9579 5885 = mgr.get_or_create_unknown_svalue (size_type_node);
b9365b93 5886 const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
808f4dfe
DM
5887 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
5888 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6969ac30
DM
5889 model.set_value (q, p, NULL);
5890
6969ac30
DM
5891 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
5892 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
5893 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
5894 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
5895
5896 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
5897
6969ac30
DM
5898 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
5899 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
5900 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
5901 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
5902}
5903
808f4dfe
DM
5904/* Smoketest of getting and setting the value of a variable. */
5905
5906static void
5907test_var ()
5908{
5909 /* "int i;" */
5910 tree i = build_global_decl ("i", integer_type_node);
5911
5912 tree int_17 = build_int_cst (integer_type_node, 17);
5913 tree int_m3 = build_int_cst (integer_type_node, -3);
5914
5915 region_model_manager mgr;
5916 region_model model (&mgr);
5917
5918 const region *i_reg = model.get_lvalue (i, NULL);
5919 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
5920
5921 /* Reading "i" should give a symbolic "initial value". */
5922 const svalue *sval_init = model.get_rvalue (i, NULL);
5923 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
5924 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
5925 /* ..and doing it again should give the same "initial value". */
5926 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
5927
5928 /* "i = 17;". */
5929 model.set_value (i, int_17, NULL);
5930 ASSERT_EQ (model.get_rvalue (i, NULL),
5931 model.get_rvalue (int_17, NULL));
5932
5933 /* "i = -3;". */
5934 model.set_value (i, int_m3, NULL);
5935 ASSERT_EQ (model.get_rvalue (i, NULL),
5936 model.get_rvalue (int_m3, NULL));
5937
5938 /* Verify get_offset for "i". */
5939 {
5940 region_offset offset = i_reg->get_offset ();
5941 ASSERT_EQ (offset.get_base_region (), i_reg);
5942 ASSERT_EQ (offset.get_bit_offset (), 0);
5943 }
5944}
5945
5946static void
5947test_array_2 ()
5948{
5949 /* "int arr[10];" */
5950 tree tlen = size_int (10);
5951 tree arr_type
5952 = build_array_type (integer_type_node, build_index_type (tlen));
5953 tree arr = build_global_decl ("arr", arr_type);
5954
5955 /* "int i;" */
5956 tree i = build_global_decl ("i", integer_type_node);
5957
5958 tree int_0 = build_int_cst (integer_type_node, 0);
5959 tree int_1 = build_int_cst (integer_type_node, 1);
5960
5961 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
5962 arr, int_0, NULL_TREE, NULL_TREE);
5963 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
5964 arr, int_1, NULL_TREE, NULL_TREE);
5965 tree arr_i = build4 (ARRAY_REF, integer_type_node,
5966 arr, i, NULL_TREE, NULL_TREE);
5967
5968 tree int_17 = build_int_cst (integer_type_node, 17);
5969 tree int_42 = build_int_cst (integer_type_node, 42);
5970 tree int_m3 = build_int_cst (integer_type_node, -3);
5971
5972 region_model_manager mgr;
5973 region_model model (&mgr);
5974 /* "arr[0] = 17;". */
5975 model.set_value (arr_0, int_17, NULL);
5976 /* "arr[1] = -3;". */
5977 model.set_value (arr_1, int_m3, NULL);
5978
5979 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
5980 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
5981
5982 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
5983 model.set_value (arr_1, int_42, NULL);
5984 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
5985
5986 /* Verify get_offset for "arr[0]". */
5987 {
5988 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
5989 region_offset offset = arr_0_reg->get_offset ();
5990 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5991 ASSERT_EQ (offset.get_bit_offset (), 0);
5992 }
5993
5994 /* Verify get_offset for "arr[1]". */
5995 {
5996 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
5997 region_offset offset = arr_1_reg->get_offset ();
5998 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
5999 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6000 }
6001
6002 /* "arr[i] = i;" - this should remove the earlier bindings. */
6003 model.set_value (arr_i, i, NULL);
6004 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
6005 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
6006
6007 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
6008 model.set_value (arr_0, int_17, NULL);
6009 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
6010 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
6011}
6012
6013/* Smoketest of dereferencing a pointer via MEM_REF. */
6014
6015static void
6016test_mem_ref ()
6017{
6018 /*
6019 x = 17;
6020 p = &x;
6021 *p;
6022 */
6023 tree x = build_global_decl ("x", integer_type_node);
6024 tree int_star = build_pointer_type (integer_type_node);
6025 tree p = build_global_decl ("p", int_star);
6026
6027 tree int_17 = build_int_cst (integer_type_node, 17);
6028 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
6029 tree offset_0 = build_int_cst (integer_type_node, 0);
6030 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
6031
6032 region_model_manager mgr;
6033 region_model model (&mgr);
6034
6035 /* "x = 17;". */
6036 model.set_value (x, int_17, NULL);
6037
6038 /* "p = &x;". */
6039 model.set_value (p, addr_of_x, NULL);
6040
6041 const svalue *sval = model.get_rvalue (star_p, NULL);
6042 ASSERT_EQ (sval->maybe_get_constant (), int_17);
6043}
6044
6045/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
6046 Analogous to this code:
6047 void test_6 (int a[10])
6048 {
6049 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
6050 a[3] = 42;
6051 __analyzer_eval (a[3] == 42); [should be TRUE]
6052 }
6053 from data-model-1.c, which looks like this at the gimple level:
6054 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
6055 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
6056 int _2 = *_1; # MEM_REF
6057 _Bool _3 = _2 == 42;
6058 int _4 = (int) _3;
6059 __analyzer_eval (_4);
6060
6061 # a[3] = 42;
6062 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
6063 *_5 = 42; # MEM_REF
6064
6065 # __analyzer_eval (a[3] == 42); [should be TRUE]
6066 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
6067 int _7 = *_6; # MEM_REF
6068 _Bool _8 = _7 == 42;
6069 int _9 = (int) _8;
6070 __analyzer_eval (_9); */
6071
6072static void
6073test_POINTER_PLUS_EXPR_then_MEM_REF ()
6074{
6075 tree int_star = build_pointer_type (integer_type_node);
6076 tree a = build_global_decl ("a", int_star);
6077 tree offset_12 = build_int_cst (size_type_node, 12);
6078 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
6079 tree offset_0 = build_int_cst (integer_type_node, 0);
6080 tree mem_ref = build2 (MEM_REF, integer_type_node,
6081 pointer_plus_expr, offset_0);
6082 region_model_manager mgr;
6083 region_model m (&mgr);
6084
6085 tree int_42 = build_int_cst (integer_type_node, 42);
6086 m.set_value (mem_ref, int_42, NULL);
6087 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
6088}
6089
6090/* Verify that malloc works. */
6091
6092static void
6093test_malloc ()
6094{
6095 tree int_star = build_pointer_type (integer_type_node);
6096 tree p = build_global_decl ("p", int_star);
6097 tree n = build_global_decl ("n", integer_type_node);
6098 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
6099 n, build_int_cst (size_type_node, 4));
6100
6101 region_model_manager mgr;
6102 test_region_model_context ctxt;
6103 region_model model (&mgr);
6104
6105 /* "p = malloc (n * 4);". */
6106 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 6107 const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe
DM
6108 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
6109 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 6110 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
6111}
6112
6113/* Verify that alloca works. */
6114
6115static void
6116test_alloca ()
6117{
6118 auto_vec <tree> param_types;
6119 tree fndecl = make_fndecl (integer_type_node,
6120 "test_fn",
6121 param_types);
6122 allocate_struct_function (fndecl, true);
6123
6124
6125 tree int_star = build_pointer_type (integer_type_node);
6126 tree p = build_global_decl ("p", int_star);
6127 tree n = build_global_decl ("n", integer_type_node);
6128 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
6129 n, build_int_cst (size_type_node, 4));
6130
6131 region_model_manager mgr;
6132 test_region_model_context ctxt;
6133 region_model model (&mgr);
6134
6135 /* Push stack frame. */
6136 const region *frame_reg
6137 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
6138 NULL, &ctxt);
6139 /* "p = alloca (n * 4);". */
6140 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 6141 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
808f4dfe
DM
6142 ASSERT_EQ (reg->get_parent_region (), frame_reg);
6143 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
6144 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 6145 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
6146
6147 /* Verify that the pointers to the alloca region are replaced by
6148 poisoned values when the frame is popped. */
6149 model.pop_frame (NULL, NULL, &ctxt);
33255ad3 6150 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
808f4dfe
DM
6151}
6152
71fc4655
DM
6153/* Verify that svalue::involves_p works. */
6154
6155static void
6156test_involves_p ()
6157{
6158 region_model_manager mgr;
6159 tree int_star = build_pointer_type (integer_type_node);
6160 tree p = build_global_decl ("p", int_star);
6161 tree q = build_global_decl ("q", int_star);
6162
6163 test_region_model_context ctxt;
6164 region_model model (&mgr);
6165 const svalue *p_init = model.get_rvalue (p, &ctxt);
6166 const svalue *q_init = model.get_rvalue (q, &ctxt);
6167
6168 ASSERT_TRUE (p_init->involves_p (p_init));
6169 ASSERT_FALSE (p_init->involves_p (q_init));
6170
6171 const region *star_p_reg = mgr.get_symbolic_region (p_init);
6172 const region *star_q_reg = mgr.get_symbolic_region (q_init);
6173
6174 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
6175 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
6176
6177 ASSERT_TRUE (init_star_p->involves_p (p_init));
6178 ASSERT_FALSE (p_init->involves_p (init_star_p));
6179 ASSERT_FALSE (init_star_p->involves_p (q_init));
6180 ASSERT_TRUE (init_star_q->involves_p (q_init));
6181 ASSERT_FALSE (init_star_q->involves_p (p_init));
6182}
6183
757bf1df
DM
6184/* Run all of the selftests within this file. */
6185
6186void
6187analyzer_region_model_cc_tests ()
6188{
8c08c983 6189 test_tree_cmp_on_constants ();
757bf1df 6190 test_dump ();
808f4dfe
DM
6191 test_struct ();
6192 test_array_1 ();
90f7c300 6193 test_get_representative_tree ();
757bf1df 6194 test_unique_constants ();
808f4dfe
DM
6195 test_unique_unknowns ();
6196 test_initial_svalue_folding ();
6197 test_unaryop_svalue_folding ();
6198 test_binop_svalue_folding ();
6199 test_sub_svalue_folding ();
6200 test_descendent_of_p ();
391512ad 6201 test_bit_range_regions ();
757bf1df 6202 test_assignment ();
a96f1c38 6203 test_compound_assignment ();
757bf1df
DM
6204 test_stack_frames ();
6205 test_get_representative_path_var ();
808f4dfe 6206 test_equality_1 ();
757bf1df
DM
6207 test_canonicalization_2 ();
6208 test_canonicalization_3 ();
8c08c983 6209 test_canonicalization_4 ();
757bf1df
DM
6210 test_state_merging ();
6211 test_constraint_merging ();
808f4dfe
DM
6212 test_widening_constraints ();
6213 test_iteration_1 ();
6969ac30 6214 test_malloc_constraints ();
808f4dfe
DM
6215 test_var ();
6216 test_array_2 ();
6217 test_mem_ref ();
6218 test_POINTER_PLUS_EXPR_then_MEM_REF ();
6219 test_malloc ();
6220 test_alloca ();
71fc4655 6221 test_involves_p ();
757bf1df
DM
6222}
6223
6224} // namespace selftest
6225
6226#endif /* CHECKING_P */
6227
75038aa6
DM
6228} // namespace ana
6229
757bf1df 6230#endif /* #if ENABLE_ANALYZER */