/* Classes for modeling the state of memory.
   Copyright (C) 2019-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"
#include "analyzer/checker-event.h"
#include "analyzer/checker-path.h"

#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}

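/* An illustrative sketch of the intended usage (not code from elsewhere
   in GCC): instead of formatting a type from within a pp_printf-driven
   callback via
     pp_printf (pp, "cast from %qT", type);
   one writes:
     pp_string (pp, "cast from ");
     print_quoted_type (pp, type);
   thus avoiding the reentrant pp_printf call described above.  */
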
/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
        return false;
      if (sval != *other_slot)
        return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
                                 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (i > 0)
        pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}


/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
                                       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
        {
          if (iter_sval == *other_slot)
            out->put (iter_reg, iter_sval);
          else
            return false;
        }
    }
  return true;
}

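/* An illustrative sketch of the merging rule above, derived from the
   code (the region/svalue names are hypothetical): merging
   {r1: A, r2: B} with {r1: A} intersects to OUT = {r1: A} and succeeds,
   since the maps agree wherever both have a binding, whereas merging
   {r1: A} with {r1: B} (with A != B) is rejected.  */
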
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
        to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
                          bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
        pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
                      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

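/* Canonicalization is the precondition for the equality test above;
   a sketch of the intended usage pattern (hypothetical variable names):

     model_a.canonicalize ();
     model_b.canonicalize ();
     bool equal_p = (model_a == model_b);

   since operator== compares the stores and constraint managers
   directly.  */
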
/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
                             const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override
  {
    return "poisoned_value_diagnostic";
  }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
            && m_pkind == other.m_pkind
            && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
        return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
        return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        {
          diagnostic_metadata m;
          m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use of uninitialized value %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_FREED:
        {
          diagnostic_metadata m;
          m.add_cwe (416); /* "CWE-416: Use After Free".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use after %<free%> of %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_POPPED_STACK:
        {
          /* TODO: which CWE?  */
          return warning_at
            (rich_loc, get_controlling_option (),
             "dereferencing pointer %qE to within stale stack frame",
             m_expr);
        }
        break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return ev.formatted_print ("use of uninitialized value %qE here",
                                   m_expr);
      case POISON_KIND_FREED:
        return ev.formatted_print ("use after %<free%> of %qE here",
                                   m_expr);
      case POISON_KIND_POPPED_STACK:
        return ev.formatted_print
          ("dereferencing pointer %qE to within stale stack frame",
           m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
                               m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
                                   int operand_precision,
                                   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && m_operand_precision == other.m_operand_precision
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by count (%qE) >= precision of type (%qi)",
                       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};

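/* For example, with a 32-bit int (an illustrative sketch; these lines
   are not from a testcase):
     int a = x << -1;   triggers -Wanalyzer-shift-count-negative
     int b = x << 32;   triggers -Wanalyzer-shift-count-overflow
   Both diagnostics are emitted from the LSHIFT_EXPR/RSHIFT_EXPR
   handling in region_model::get_gassign_result below, following the
   INT34-C rule cited there.  */
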
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
                                  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
        /* e.g. "_1 = a_10(D) + 12;" */
        tree ptr = rhs1;
        tree offset = gimple_assign_rhs2 (assign);

        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
           is an integer of type sizetype".  */
        offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
        /* e.g. "_1 = p_2(D) - q_3(D);".  */
        tree rhs2 = gimple_assign_rhs2 (assign);
        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        // TODO: perhaps fold to zero if they're known to be equal?

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    /* Assignments of the form
         set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        /* Unary ops.  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
        return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (TREE_TYPE (lhs) == boolean_type_node)
          {
            /* Consider constraints between svalues.  */
            tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
            if (t.is_known ())
              return m_mgr->get_or_create_constant_svalue
                (t.is_true () ? boolean_true_node : boolean_false_node);
          }

        /* Otherwise, generate a symbolic binary op.  */
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
        /* Binary ops.  */
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
          {
            /* "INT34-C. Do not shift an expression by a negative number of bits
               or by greater than or equal to the number of bits that exist in
               the operand."  */
            if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
              if (TREE_CODE (rhs2_cst) == INTEGER_CST)
                {
                  if (tree_int_cst_sgn (rhs2_cst) < 0)
                    ctxt->warn
                      (make_unique<shift_count_negative_diagnostic>
                         (assign, rhs2_cst));
                  else if (compare_tree_int (rhs2_cst,
                                             TYPE_PRECISION (TREE_TYPE (rhs1)))
                           >= 0)
                    ctxt->warn
                      (make_unique<shift_count_overflow_diagnostic>
                         (assign,
                          int (TYPE_PRECISION (TREE_TYPE (rhs1))),
                          rhs2_cst));
                }
          }

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

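/* An illustrative sketch of get_gassign_result: for the gimple stmt
     _1 = p_2(D) + 4;
   (a POINTER_PLUS_EXPR), the result is a binop svalue combining the
   svalue for p_2 with the constant offset 4 (suitably cast), via
   m_mgr->get_or_create_binop.  The model itself is not modified, so
   the caller (on_assignment) is responsible for binding the result
   to _1.  */
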
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B,
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
                               const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
        && gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
        return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
        return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}

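/* An illustrative C-level sketch of the situation handled above (the
   variable names are hypothetical):

     int a = 1;
     int b;               (uninitialized)
     if (a || b || c)
       ...

   With optimization enabled the inner "a || b" becomes "tmp = a | b",
   reading b; since a is known to be 1, b's value cannot matter, so no
   uninitialized-use warning should be emitted for it.  */
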
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;".

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of an IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
                                tree expr,
                                const region *src_region,
                                region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
         to initialize.  */
      if (pkind == POISON_KIND_UNINIT
          && sval->get_type ()
          && is_empty_type (sval->get_type ()))
        return sval;

      if (pkind == POISON_KIND_UNINIT)
        if (const gimple *curr_stmt = ctxt->get_stmt ())
          if (const gassign *assign_stmt
                = dyn_cast <const gassign *> (curr_stmt))
            {
              /* Special case to avoid certain false positives.  */
              if (within_short_circuited_stmt_p (this, assign_stmt))
                return sval;

              /* Special case to avoid false positive on
                 -ftrivial-auto-var-init=.  */
              if (due_to_ifn_deferred_init_p (assign_stmt))
                return sval;
            }

      /* If we have an SSA name for a temporary, we don't want to print
         '<unknown>'.
         Poisoned values are shared by type, and so we can't reconstruct
         the tree other than via the def stmts, using
         fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
        src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
                                                              pkind,
                                                              src_region)))
        {
          /* We only want to report use of a poisoned value at the first
             place it gets used; return an unknown value to avoid generating
             a chain of followup warnings.  */
          sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
        }

      return sval;
    }

  return sval;
}

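/* For example (an illustrative sketch, not from a testcase):

     int f (void)
     {
       int i;
       return i;
     }

   reading "i" yields a poisoned svalue with POISON_KIND_UNINIT; the
   first use is reported via poisoned_value_diagnostic, and an unknown
   svalue is substituted so that followup uses of the same value do not
   produce a cascade of warnings.  */
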
/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
        expr = decl;
      else
        return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
        if (0)
          sorry_at (assign->location, "unhandled assignment op: %qs",
                    get_tree_code_name (op));
        const svalue *unknown_sval
          = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
        set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
        if (TREE_CLOBBER_P (rhs1))
          {
            /* e.g. "x ={v} {CLOBBER};"  */
            clobber_region (lhs_reg);
          }
        else
          {
            /* Any CONSTRUCTOR that survives to this point is either
               just a zero-init of everything, or a vector.  */
            if (!CONSTRUCTOR_NO_CLEARING (rhs1))
              zero_fill_region (lhs_reg);
            unsigned ix;
            tree index;
            tree val;
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
              {
                gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
                if (!index)
                  index = build_int_cst (integer_type_node, ix);
                gcc_assert (TREE_CODE (index) == INTEGER_CST);
                const svalue *index_sval
                  = m_mgr->get_or_create_constant_svalue (index);
                gcc_assert (index_sval);
                const region *sub_reg
                  = m_mgr->get_element_region (lhs_reg,
                                               TREE_TYPE (val),
                                               index_sval);
                const svalue *val_sval = get_rvalue (val, ctxt);
                set_value (sub_reg, val_sval, ctxt);
              }
          }
      }
      break;

    case STRING_CST:
      {
        /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
                           ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
                           bool *out_unknown_side_effects,
                           region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
        const gassign *assign = as_a <const gassign *> (stmt);
        on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
        const gasm *asm_stmt = as_a <const gasm *> (stmt);
        on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
        /* Track whether we have a gcall to a function that's not recognized by
           anything, for which we don't have a function body, or for which we
           don't know the fndecl.  */
        const gcall *call = as_a <const gcall *> (stmt);
        *out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
        const greturn *return_ = as_a <const greturn *> (stmt);
        on_return (return_, ctxt);
      }
      break;
    }
}

/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}

/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}

/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
        return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
                                                 cd.get_fndecl_for_call (),
                                                 inputs);
  return sval;
}

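/* For example (an illustrative sketch; "square" is hypothetical):

     extern int square (int) __attribute__((const));
     ...
     x = square (y);
     z = square (y);

   both calls yield the same const_fn_result_svalue for equal inputs,
   so the analyzer can treat x and z as equal without knowing the
   function body.  */
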
/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
                                         int retval,
                                         bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
                                      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}

/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
                            region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
        {
          return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
          /* TODO: we might want to also capture the constraint
             when recording the diagnostic, or note that we're using
             the upper bound.  */
        }
  return NULL;
}

/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
                                     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
        = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}

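/* An illustrative sketch of the kernel idiom handled above (the
   identifiers are hypothetical):

     n = min_t(unsigned long, len, sizeof (buf));
     copy_to_user (uptr, buf, n);

   even though "len" is symbolic, maybe_simplify_upper_bound lets the
   copy size be bounded by the constant sizeof (buf).  */
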
/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false;

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
        {
          if (callee_fndecl
              && lookup_attribute ("malloc", DECL_ATTRIBUTES (callee_fndecl)))
            {
              const region *new_reg
                = get_or_create_region_for_heap_alloc (NULL, ctxt);
              mark_region_as_unknown (new_reg, NULL);
              sval = m_mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
            }
          else
            /* For the common case of functions without __attribute__((const)),
               use a conjured value, and purge any prior state involving that
               value (in case this is in a loop).  */
            sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
                                                         lhs_region,
                                                         conjured_purge (this,
                                                                         ctxt));
        }
      set_value (lhs_region, sval, ctxt);
    }

  if (gimple_call_internal_p (call))
    if (const known_function *kf
          = get_known_function (gimple_call_internal_fn (call)))
      {
        kf->impl_call_pre (cd);
        return false;
      }

  if (callee_fndecl)
    {
      int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);

      if (const known_function *kf = get_known_function (callee_fndecl, cd))
        {
          kf->impl_call_pre (cd);
          return false;
        }
      else if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
               && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
        {
          if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
            unknown_side_effects = true;
        }
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
               && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
               && !fndecl_built_in_p (callee_fndecl))
        unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  return unknown_side_effects;
}

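/* For example (an illustrative sketch; "my_alloc" is hypothetical):

     extern void *my_alloc (size_t) __attribute__((malloc));
     ...
     p = my_alloc (n);

   in on_call_pre the lhs "p" is bound to a pointer to a freshly-created
   heap-allocated region with unknown content, rather than to a mere
   conjured svalue.  */
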
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
                            bool unknown_side_effects,
                            region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
        {
          kf->impl_call_post (cd);
          return;
        }
      /* Was this fndecl referenced by
         __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
        {
          impl_deallocation_call (cd);
          return;
        }
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}

/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
                                     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}

/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
            "parameter %i of %qD marked with attribute %qs",
            m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
            && m_ptr_argno == other.m_ptr_argno
            && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};

/* Check CALL, a call to external function CALLEE_FNDECL, based on
   any __attribute__ ((access, ...)) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */

void
region_model::
check_external_function_for_access_attr (const gcall *call,
                                         tree callee_fndecl,
                                         region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
        continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
        continue;

      if (access->mode == access_write_only
          || access->mode == access_read_write)
        {
          /* Subclass of decorated_region_model_context that
             adds a note about the attr access to any saved diagnostics.  */
          class annotating_ctxt : public note_adding_context
          {
          public:
            annotating_ctxt (tree callee_fndecl,
                             const attr_access &access,
                             region_model_context *ctxt)
            : note_adding_context (ctxt),
              m_callee_fndecl (callee_fndecl),
              m_access (access)
            {
            }
            std::unique_ptr<pending_note> make_note () final override
            {
              return make_unique<reason_attr_access>
                (m_callee_fndecl, m_access);
            }
          private:
            tree m_callee_fndecl;
            const attr_access &m_access;
          };

          /* Use this ctxt below so that any diagnostics get the
             note added to them.  */
          annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

          tree ptr_tree = gimple_call_arg (call, access->ptrarg);
          const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
          const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
          check_region_for_write (reg, &my_ctxt);
          /* We don't use the size arg for now.  */
        }
    }
}

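/* For example (an illustrative sketch; "fill_buf" is hypothetical):

     extern void fill_buf (char *dst, size_t n)
       __attribute__ ((access (write_only, 1, 2)));
     ...
     fill_buf (p, sz);

   the region pointed to by "p" is checked for writability, and any
   diagnostic gets a note that parameter 1 of fill_buf was marked with
   the "access" attribute.  */
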
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_external_function_for_access_attr (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}

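/* Example (illustrative sketch, not part of this file): given

     extern void unknown_fn (int *);
     int g;
     void test (int *p)
     {
       *p = 42;
       g = 17;
       unknown_fn (p);
     }

   after the call, both *p and g are conservatively reset to new
   unknown values: the callee could have written through the escaped
   pointer p, or to the global g directly.  */
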
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}

/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics.  */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  if (lhs && rhs)
    {
      const svalue *sval = get_rvalue (rhs, ctxt);
      const region *ret_reg = get_lvalue (lhs, ctxt);
      set_value (ret_reg, sval, ctxt);
    }
}

/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/siglongjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}

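/* Example (illustrative sketch, not part of this file):

     jmp_buf env;
     ...
     if (setjmp (env) == 0)
       first_time ();

   on_setjmp models the direct call: it binds a setjmp_svalue
   (recording ENODE and the call) into the buffer pointed to by env,
   and sets the call's LHS to the constant 0, so the "first time"
   branch is the one taken here.  The nonzero second return is
   modeled separately by on_longjmp.  */
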
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}

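/* Example (illustrative sketch, not part of this file): for

     int r = setjmp (env);
     ...
     longjmp (env, 0);

   the rewound state pops back to setjmp's frame and binds r to 1,
   per the C rule that a zero val makes setjmp return 1; for any
   other val, r is merely constrained to be nonzero.  */
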
/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Get state from OLD_STATE so that all of the phi stmts for a basic block
   are effectively handled simultaneously.  */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  const region_model &old_state,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
  const region *dst_reg = old_state.get_lvalue (lhs, ctxt);

  set_value (dst_reg, src_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}

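/* Example (illustrative sketch, not part of this file): for GIMPLE
   such as

     # x_3 = PHI <x_1(2), x_2(3)>

   when following the edge from basic block 2, handle_phi is called
   with LHS = x_3 and RHS = x_1.  Reading RHS from OLD_STATE matters
   when a block has several phis, e.g. a value swap:

     # a_3 = PHI <b_1(2)>
     # b_4 = PHI <a_2(2)>

   all the phis must see the pre-phi values, not each other's
   updates.  */
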
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}

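/* Example (illustrative sketch, not part of this file): for

     struct coord { int x; int y; };
     struct coord arr[10];
     ... arr[i].y ...

   the lvalue arr[i].y becomes a field_region for "y" within an
   element_region for index i within the decl_region for "arr",
   built by the ARRAY_REF and COMPONENT_REF cases above.  */
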
/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op.  */

static void
assert_compat_types (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    {
#if CHECKING_P
      if (!(useless_type_conversion_p (src_type, dst_type)))
	internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
#endif
    }
}

/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op.  */

bool
compat_types_p (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    if (!(useless_type_conversion_p (src_type, dst_type)))
      return false;
  return true;
}

/* Get the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const region *result_reg = get_lvalue_1 (pv, ctxt);
  assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
  return result_reg;
}

/* Get the region for EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const region *
region_model::get_lvalue (tree expr, region_model_context *ctxt) const
{
  return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR".  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}

/* Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const svalue *result_sval = get_rvalue_1 (pv, ctxt);

  assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));

  result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);

  return result_sval;
}

/* Get the value of EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const svalue *
region_model::get_rvalue (tree expr, region_model_context *ctxt) const
{
  return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Return true if this model is on a path with "main" as the entrypoint
   (as opposed to one in which we're merely analyzing a subset of the
   path through the code).  */

bool
region_model::called_from_main_p () const
{
  if (!m_current_frame)
    return false;
  /* Determine if the oldest stack frame in this model is for "main".  */
  const frame_region *frame0 = get_frame_at_index (0);
  gcc_assert (frame0);
  return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
}

/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    {
      /* Attempt to get the initializer value for base_reg.  */
      if (const svalue *base_reg_init
	    = base_reg->get_svalue_for_initializer (m_mgr))
	{
	  if (reg == base_reg)
	    return base_reg_init;
	  else
	    {
	      /* Get the value for REG within base_reg_init.  */
	      binding_cluster c (base_reg);
	      c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
	      const svalue *sval
		= c.get_any_binding (m_mgr->get_store_manager (), reg);
	      if (sval)
		{
		  if (reg->get_type ())
		    sval = m_mgr->get_or_create_cast (reg->get_type (),
						      sval);
		  return sval;
		}
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}

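/* Example (illustrative sketch, not part of this file): given a
   file-scope definition

     static int limit = 100;

   on a path rooted at "main", reading limit before any write yields
   the constant 100 from its initializer.  On a path that starts in an
   arbitrary callee, the read instead yields INIT_VAL(limit), since an
   unseen caller could already have modified it.  */
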
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  check_region_for_read (reg, ctxt);

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg);
}

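/* Example (illustrative sketch, not part of this file): reading

     const char *s = "foo";
     char c = s[1];

   hits the constant-index STRING_CST special case above: byte offset 1
   within the string_region for "foo" resolves to the constant 'o'
   rather than an unknown value.  */
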
/* Return false if REG does not exist, true if it may do.
   This is for detecting regions within the stack that don't exist anymore
   after frames are popped.  */

bool
region_model::region_exists_p (const region *reg) const
{
  /* If within a stack frame, check that the stack frame is live.  */
  if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
    {
      /* Check that the current frame is the enclosing frame, or is called
	 by it.  */
      for (const frame_region *iter_frame = get_current_frame (); iter_frame;
	   iter_frame = iter_frame->get_calling_frame ())
	if (iter_frame == enclosing_frame)
	  return true;
      return false;
    }

  return true;
}

/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
  const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
  m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (make_unique<poisoned_value_diagnostic>
			      (ptr, pkind, NULL));
	      }
	  }
      }
      break;
    }

  return m_mgr->get_symbolic_region (ptr_sval);
}

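/* Example (illustrative sketch, not part of this file): in

     void test (char *p)
     {
       char c = *(p + 4);
     }

   p's value is INIT_VAL(p), so "p + 4" is a binop_svalue using
   POINTER_PLUS_EXPR; deref_rvalue converts it into an offset_region
   at offset 4 within the symbolic region that p points to, and also
   records the constraint that p itself is non-NULL.  */
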
/* Attempt to get BITS within any value of REG, as TYPE.
   In particular, extract values from compound_svalues for the case
   where there's a concrete binding at BITS.
   Return an unknown svalue if we can't handle the given case.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_rvalue_for_bits (tree type,
				   const region *reg,
				   const bit_range &bits,
				   region_model_context *ctxt) const
{
  const svalue *sval = get_store_value (reg, ctxt);
  return m_mgr->get_or_create_bits_within (type, bits, sval);
}

/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory.  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;
  tree m_decl;
};

/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;
};

/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g. the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}

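/* Example (illustrative sketch, not part of this file):

     char *s = "foo";
     s[0] = 'x';        triggers -Wanalyzer-write-to-string-literal

   since the base region of the write is the string_region for "foo"
   (kind RK_STRING).  Writes whose base region is a const global decl,
   a function, or a label are flagged analogously via
   write_to_const_diagnostic.  */
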
/* Get the capacity of REG in bytes.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    }

  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}

/* Return the string size, including the 0-terminator, if SVAL is a
   constant_svalue holding a string.  Otherwise, return an unknown_svalue.  */

const svalue *
region_model::get_string_size (const svalue *sval) const
{
  tree cst = sval->maybe_get_constant ();
  if (!cst || TREE_CODE (cst) != STRING_CST)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}

/* Return the string size, including the 0-terminator, if REG is a
   string_region.  Otherwise, return an unknown_svalue.  */

const svalue *
region_model::get_string_size (const region *reg) const
{
  const string_region *str_reg = dyn_cast <const string_region *> (reg);
  if (!str_reg)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  tree cst = str_reg->get_string_cst ();
  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}

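/* Example (illustrative sketch, not part of this file): for the
   string_region of the literal "abc", TREE_STRING_LENGTH is 4 (the
   three characters plus the trailing NUL), so get_string_size yields
   the constant (size_t)4, whereas strlen-style logic would yield 3.  */
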
/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.  */

void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  check_region_for_taint (reg, dir, ctxt);
  check_region_bounds (reg, dir, ctxt);

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
}

/* If CTXT is non-NULL, use it to warn about any problems writing to REG.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}

/* If CTXT is non-NULL, use it to warn about any problems reading from REG.  */

void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  check_region_access (src_reg, DIR_READ, ctxt);
}

/* Concrete subclass for casts of pointers that lead to trailing bytes.  */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  dubious_allocation_size (const region *lhs, const region *rhs)
  : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
    m_has_allocation_event (false)
  {}

  dubious_allocation_size (const region *lhs, const region *rhs,
			   tree expr)
  : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
    m_has_allocation_event (false)
  {}

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return m_lhs == other.m_lhs && m_rhs == other.m_rhs
	   && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (131);

    return warning_meta (rich_loc, m, get_controlling_option (),
			 "allocated buffer size is not a multiple"
			 " of the pointee's size");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
      }

    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

private:
  const region *m_lhs;
  const region *m_rhs;
  const tree m_expr;
  bool m_has_allocation_event;
};

/* Return true if a constant allocation size CST is compatible with a
   pointee of size POINTEE_SIZE_TREE, i.e. if the size is not dubious.  */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree,
			       bool is_struct)
{
  gcc_assert (TREE_CODE (cst) == INTEGER_CST);
  gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);

  unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
  unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);

  if (is_struct)
    return alloc_size == 0 || alloc_size >= pointee_size;
  return alloc_size % pointee_size == 0;
}

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}

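/* Example (illustrative sketch, not part of this file): with
   sizeof (int) == 4,

     int *p = malloc (13);    13 % 4 != 0: dubious (CWE-131)
     int *q = malloc (12);    12 % 4 == 0: accepted

   capacity_compatible_with_type returns false for 13 and true for 12.
   For struct pointees the test is instead
   alloc_size == 0 || alloc_size >= pointee_size, so over-allocating
   a struct is accepted.  */
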
/* Checks whether SVAL could be a multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that might be a multiple of SIZE_CST to the RESULT_SET.  */

class size_visitor : public visitor
{
public:
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  bool get_result ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    const svalue *arg = sval->get_arg ();
    if (result_set.contains (arg))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    if (sval->get_op () == MULT_EXPR)
      {
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
      }
    else
      {
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
      }
  }

  void visit_repeated_svalue (const repeated_svalue *sval) final override
  {
    sval->get_inner_svalue ()->accept (this);
    if (result_set.contains (sval->get_inner_svalue ()))
      result_set.add (sval);
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    sval->get_arg ()->accept (this);
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) && result_set.contains (iter))
      result_set.add (sval);
  }

  void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    equiv_class_id id (-1);
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
	else
	  result_set.add (sval);
      }
  }

  void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_const_fn_result_svalue (const const_fn_result_svalue
				      *sval ATTRIBUTE_UNUSED) final override
  {
    result_set.add (sval);
  }

private:
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	result_set.add (sval);
	break;
      case INTEGER_CST:
	if (capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;
  const svalue *m_root_sval;
  constraint_manager *m_cm;
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};

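/* Example (illustrative sketch, not part of this file): for

     int *p = malloc (n * sizeof (int));

   the capacity is a binop_svalue MULT_EXPR (n, 4).  In size_visitor,
   the constant 4 is added to the result set (4 is a multiple of
   sizeof (int)), and the MULT_EXPR case in visit_binop_svalue only
   needs one compatible operand, so the whole product is accepted and
   no -Wanalyzer-allocation-size warning is issued.  */
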
/* Return true if a struct or union either uses the inheritance pattern,
   where the first field is a base struct, or the flexible array member
   pattern, where the last field is an array without a specified size.  */

static bool
struct_or_union_with_inheritance_p (tree struc)
{
  tree iter = TYPE_FIELDS (struc);
  if (iter == NULL_TREE)
    return false;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
    return true;

  tree last_field;
  while (iter != NULL_TREE)
    {
      last_field = iter;
      iter = DECL_CHAIN (iter);
    }

  if (last_field != NULL_TREE
      && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
    return true;

  return false;
}

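/* Example (illustrative sketch, not part of this file): the two
   patterns recognized above:

     struct derived { struct base b; int extra; };    inheritance
     struct str { size_t len; char data[]; };         flexible array

   For such types, allocating more than sizeof (struct ...) bytes is
   routine, so check_region_size skips them.  */
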
/* Return true if the lhs and rhs of an assignment have different types.  */

static bool
is_any_cast_p (const gimple *stmt)
{
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    return gimple_assign_cast_p (assign)
	   || !pending_diagnostic::same_tree_p (
		  TREE_TYPE (gimple_assign_lhs (assign)),
		  TREE_TYPE (gimple_assign_rhs1 (assign)));
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
				    TREE_TYPE (gimple_call_lhs (call)),
				    gimple_call_return_type (call));
    }

  return false;
}

/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
  if (!reg_sval)
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = reg_sval->get_pointee ();
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							     cst_cap));
      }
      break;
    default:
      {
	if (!is_struct)
	  {
	    size_visitor v (pointee_size_tree, capacity, m_constraints);
	    if (!v.get_result ())
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
								   rhs_reg,
								   expr));
	      }
	  }
	break;
      }
    }
}

/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Setting the value of an empty region is a no-op.  */
  if (lhs_reg->empty_p ())
    return;

  check_region_size (lhs_reg, rhs_sval, ctxt);

  check_region_for_write (lhs_reg, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : NULL);
}

/* Set the value of the region given by LHS to the value given by RHS.  */

void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}

/* Remove all bindings overlapping REG within the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}

/* Remove any bindings for REG within the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}

/* Fill REG with SVAL.  */

void
region_model::fill_region (const region *reg, const svalue *sval)
{
  m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
}

/* Zero-fill REG.  */

void
region_model::zero_fill_region (const region *reg)
{
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}

/* Mark REG as having unknown content.  */

void
region_model::mark_region_as_unknown (const region *reg,
				      uncertainty_t *uncertainty)
{
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
				  uncertainty);
}

808f4dfe 3244/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
3245 this model. */
3246
3247tristate
808f4dfe
DM
3248region_model::eval_condition (const svalue *lhs,
3249 enum tree_code op,
3250 const svalue *rhs) const
757bf1df 3251{
757bf1df
DM
3252 gcc_assert (lhs);
3253 gcc_assert (rhs);
3254
808f4dfe
DM
3255 /* For now, make no attempt to capture constraints on floating-point
3256 values. */
3257 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
3258 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
3259 return tristate::unknown ();
3260
9bbcee45
DM
3261 /* See what we know based on the values. */
3262
808f4dfe
DM
3263 /* Unwrap any unmergeable values. */
3264 lhs = lhs->unwrap_any_unmergeable ();
3265 rhs = rhs->unwrap_any_unmergeable ();
3266
3267 if (lhs == rhs)
757bf1df 3268 {
808f4dfe
DM
3269 /* If we have the same svalue, then we have equality
3270 (apart from NaN-handling).
3271 TODO: should this definitely be the case for poisoned values? */
3272 /* Poisoned and unknown values are "unknowable". */
3273 if (lhs->get_kind () == SK_POISONED
3274 || lhs->get_kind () == SK_UNKNOWN)
3275 return tristate::TS_UNKNOWN;
e978955d 3276
808f4dfe 3277 switch (op)
757bf1df 3278 {
808f4dfe
DM
3279 case EQ_EXPR:
3280 case GE_EXPR:
3281 case LE_EXPR:
3282 return tristate::TS_TRUE;
07c86323 3283
808f4dfe
DM
3284 case NE_EXPR:
3285 case GT_EXPR:
3286 case LT_EXPR:
3287 return tristate::TS_FALSE;
3288
3289 default:
3290 /* For other ops, use the logic below. */
3291 break;
757bf1df 3292 }
808f4dfe 3293 }
757bf1df 3294
808f4dfe
DM
3295 /* If we have a pair of region_svalues, compare them. */
3296 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3297 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3298 {
3299 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
3300 if (res.is_known ())
3301 return res;
3302 /* Otherwise, only known through constraints. */
3303 }
757bf1df 3304
808f4dfe 3305 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
18faaeb3
DM
3306 {
3307 /* If we have a pair of constants, compare them. */
3308 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3309 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3310 else
3311 {
3312 /* When we have one constant, put it on the RHS. */
3313 std::swap (lhs, rhs);
3314 op = swap_tree_comparison (op);
3315 }
3316 }
3317 gcc_assert (lhs->get_kind () != SK_CONSTANT);
757bf1df 3318
e82e0f14
DM
3319 /* Handle comparison against zero. */
3320 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3321 if (zerop (cst_rhs->get_constant ()))
3322 {
3323 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3324 {
3325 /* A region_svalue is a non-NULL pointer, except in certain
3326 special cases (see the comment for region::non_null_p). */
3327 const region *pointee = ptr->get_pointee ();
3328 if (pointee->non_null_p ())
3329 {
3330 switch (op)
3331 {
3332 default:
3333 gcc_unreachable ();
3334
3335 case EQ_EXPR:
3336 case GE_EXPR:
3337 case LE_EXPR:
3338 return tristate::TS_FALSE;
3339
3340 case NE_EXPR:
3341 case GT_EXPR:
3342 case LT_EXPR:
3343 return tristate::TS_TRUE;
3344 }
3345 }
3346 }
3347 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3348 {
3349 /* Treat offsets from a non-NULL pointer as being non-NULL. This
3350 isn't strictly true, in that eventually ptr++ will wrap
3351 around and be NULL, but it won't occur in practise and thus
3352 can be used to suppress effectively false positives that we
3353 shouldn't warn for. */
3354 if (binop->get_op () == POINTER_PLUS_EXPR)
3355 {
9bbcee45 3356 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
e82e0f14
DM
3357 if (lhs_ts.is_known ())
3358 return lhs_ts;
3359 }
3360 }
0b737090
DM
3361 else if (const unaryop_svalue *unaryop
3362 = lhs->dyn_cast_unaryop_svalue ())
3363 {
3364 if (unaryop->get_op () == NEGATE_EXPR)
3365 {
3366 /* e.g. "-X <= 0" is equivalent to X >= 0". */
3367 tristate lhs_ts = eval_condition (unaryop->get_arg (),
3368 swap_tree_comparison (op),
3369 rhs);
3370 if (lhs_ts.is_known ())
3371 return lhs_ts;
3372 }
3373 }
e82e0f14 3374 }
808f4dfe
DM
3375
3376 /* Handle rejection of equality for comparisons of the initial values of
3377 "external" values (such as params) with the address of locals. */
3378 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3379 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3380 {
3381 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3382 if (res.is_known ())
3383 return res;
3384 }
3385 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3386 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3387 {
3388 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3389 if (res.is_known ())
3390 return res;
3391 }
3392
3393 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3394 if (tree rhs_cst = rhs->maybe_get_constant ())
3395 {
3396 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3397 if (res.is_known ())
3398 return res;
3399 }
3400
7a6564c9 3401 /* Handle comparisons between two svalues with more than one operand. */
9bbcee45 3402 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
7a6564c9
TL
3403 {
3404 switch (op)
3405 {
3406 default:
3407 break;
3408 case EQ_EXPR:
3409 {
3410 /* TODO: binops can be equal even if they are not structurally
3411 equal in case of commutative operators. */
3412 tristate res = structural_equality (lhs, rhs);
3413 if (res.is_true ())
3414 return res;
3415 }
3416 break;
3417 case LE_EXPR:
3418 {
3419 tristate res = structural_equality (lhs, rhs);
3420 if (res.is_true ())
3421 return res;
3422 }
3423 break;
3424 case GE_EXPR:
3425 {
3426 tristate res = structural_equality (lhs, rhs);
3427 if (res.is_true ())
3428 return res;
3429 res = symbolic_greater_than (binop, rhs);
3430 if (res.is_true ())
3431 return res;
3432 }
3433 break;
3434 case GT_EXPR:
3435 {
3436 tristate res = symbolic_greater_than (binop, rhs);
3437 if (res.is_true ())
3438 return res;
3439 }
3440 break;
3441 }
3442 }
3443
9bbcee45
DM
3444 /* Otherwise, try constraints.
3445 Cast to const to ensure we don't change the constraint_manager as we
3446 do this (e.g. by creating equivalence classes). */
3447 const constraint_manager *constraints = m_constraints;
3448 return constraints->eval_condition (lhs, op, rhs);
808f4dfe
DM
3449}
3450
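/* Exposition-only usage sketch (an addition for illustration, not part of
   the original source): how a caller might combine the eval_condition and
   add_constraint APIs when deciding whether a guarded path is feasible.
   "model", "lhs", "rhs" and "ctxt" are hypothetical locals; the tree
   overloads declared later in this file are assumed.  */

static bool
sketch_try_take_less_than_branch (region_model &model, tree lhs, tree rhs,
				  region_model_context *ctxt)
{
  /* Query first: TS_TRUE means "lhs < rhs" is already implied, TS_FALSE
     means it contradicts known facts, and TS_UNKNOWN means the condition
     is genuinely new on this path.  */
  tristate t = model.eval_condition (lhs, LT_EXPR, rhs, ctxt);
  if (t.is_false ())
    return false; /* The branch is infeasible on this path.  */

  /* Record the condition; a false result here also means infeasible.  */
  return model.add_constraint (lhs, LT_EXPR, rhs, ctxt);
}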
9bbcee45 3451/* Subroutine of region_model::eval_condition, for rejecting
808f4dfe
DM
3452 equality of INIT_VAL(PARM) with &LOCAL. */
3453
3454tristate
3455region_model::compare_initial_and_pointer (const initial_svalue *init,
3456 const region_svalue *ptr) const
3457{
3458 const region *pointee = ptr->get_pointee ();
3459
3460 /* If we have a pointer to something within a stack frame, it can't be the
3461 initial value of a param. */
3462 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
3463 if (init->initial_value_of_param_p ())
3464 return tristate::TS_FALSE;
757bf1df
DM
3465
3466 return tristate::TS_UNKNOWN;
3467}
3468
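/* Exposition-only example (not part of the original source): within
     void test (int *p) { int local; ... }
   the condition "p == &local" is rejected as TS_FALSE by the function
   above, since the initial value of a parameter cannot already point
   into a stack frame that is only created by the call itself.  */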
7a6564c9
TL
3469/* Return true if SVAL is definitely positive. */
3470
3471static bool
3472is_positive_svalue (const svalue *sval)
3473{
3474 if (tree cst = sval->maybe_get_constant ())
3475 return !zerop (cst) && get_range_pos_neg (cst) == 1;
3476 tree type = sval->get_type ();
3477 if (!type)
3478 return false;
 3479 /* Consider a binary operation size_t + int. The analyzer wraps the int in
 3480 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
 3481 the result may be smaller than the first operand (the int may be negative).
 3482 Thus, we have to check whether the argument of the unaryop_svalue is also positive. */
3483 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
3484 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
3485 && is_positive_svalue (un_op->get_arg ());
3486 return TYPE_UNSIGNED (type);
3487}
3488
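/* Exposition-only example of the conversion case above (not part of the
   original source).  Given "size_t n" and "int delta" holding -4, the
   analyzer models "n + delta" roughly as:
     binop_svalue (PLUS_EXPR,
       initial_svalue ('size_t', 'n'),
       unaryop_svalue (NOP_EXPR, 'size_t', 'delta'))
   The converted operand has an unsigned type, yet its underlying value
   is negative, so the sum can be smaller than "n"; hence the recursion
   into the unaryop's argument rather than trusting TYPE_UNSIGNED.  */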
 3489 /* Return true if A is definitely larger than B.
 3490
 3491 Limitation: does not account for integer overflow and does not try to
 3492 return false, so it cannot be used negated. */
3493
3494tristate
3495region_model::symbolic_greater_than (const binop_svalue *bin_a,
3496 const svalue *b) const
3497{
3498 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
3499 {
3500 /* Eliminate the right-hand side of both svalues. */
3501 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3502 if (bin_a->get_op () == bin_b->get_op ()
9bbcee45
DM
3503 && eval_condition (bin_a->get_arg1 (),
3504 GT_EXPR,
3505 bin_b->get_arg1 ()).is_true ()
3506 && eval_condition (bin_a->get_arg0 (),
3507 GE_EXPR,
3508 bin_b->get_arg0 ()).is_true ())
7a6564c9
TL
3509 return tristate (tristate::TS_TRUE);
3510
3511 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
3512 if (is_positive_svalue (bin_a->get_arg1 ())
9bbcee45
DM
3513 && eval_condition (bin_a->get_arg0 (),
3514 GE_EXPR, b).is_true ())
7a6564c9
TL
3515 return tristate (tristate::TS_TRUE);
3516 }
3517 return tristate::unknown ();
3518}
3519
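/* Exposition-only examples of the eliminations above (not part of the
   original source).  With A = (x + 16) and B = (x + 8), both PLUS_EXPR:
   16 > 8 and x >= x hold, so A > B yields TS_TRUE.  With A = (x + 4)
   and B = x: 4 is positive and x >= x, so again TS_TRUE.  Overflow is
   deliberately ignored, per the limitation documented above.  */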
 3520/* Return true if A and B are equal structurally.
 3521
 3522 Structural equality means that A and B are equal if the svalues A and B have
 3523 the same nodes at the same positions in the tree and the leaves are equal.
 3524 Equality for conjured_svalues and initial_svalues is determined by comparing
 3525 the pointers, while constants are compared by value. That behavior is useful
 3526 to check for binop_svalues that evaluate to the same concrete value but
 3527 might use one operand with a different type but the same constant value.
 3528
 3529 For example,
 3530 binop_svalue (mult_expr,
 3531 initial_svalue ('size_t', decl_region (..., 'some_var')),
 3532 constant_svalue ('size_t', 4))
 3533 and
 3534 binop_svalue (mult_expr,
 3535 initial_svalue ('size_t', decl_region (..., 'some_var')),
 3536 constant_svalue ('sizetype', 4))
 3537 are structurally equal. A concrete C example where this occurs can
 3538 be found in test7 of out-of-bounds-5.c. */
3539
3540tristate
3541region_model::structural_equality (const svalue *a, const svalue *b) const
3542{
3543 /* If A and B are referentially equal, they are also structurally equal. */
3544 if (a == b)
3545 return tristate (tristate::TS_TRUE);
3546
3547 switch (a->get_kind ())
3548 {
3549 default:
3550 return tristate::unknown ();
3551 /* SK_CONJURED and SK_INITIAL are already handled
3552 by the referential equality above. */
3553 case SK_CONSTANT:
3554 {
3555 tree a_cst = a->maybe_get_constant ();
3556 tree b_cst = b->maybe_get_constant ();
3557 if (a_cst && b_cst)
3558 return tristate (tree_int_cst_equal (a_cst, b_cst));
3559 }
3560 return tristate (tristate::TS_FALSE);
3561 case SK_UNARYOP:
3562 {
3563 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
3564 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
3565 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
3566 un_b->get_type ())
3567 && un_a->get_op () == un_b->get_op ()
3568 && structural_equality (un_a->get_arg (),
3569 un_b->get_arg ()));
3570 }
3571 return tristate (tristate::TS_FALSE);
3572 case SK_BINOP:
3573 {
3574 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
3575 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3576 return tristate (bin_a->get_op () == bin_b->get_op ()
3577 && structural_equality (bin_a->get_arg0 (),
3578 bin_b->get_arg0 ())
3579 && structural_equality (bin_a->get_arg1 (),
3580 bin_b->get_arg1 ()));
3581 }
3582 return tristate (tristate::TS_FALSE);
3583 }
3584}
3585
48e8a7a6
DM
3586/* Handle various constraints of the form:
 3587 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
3588 OP : == or !=
3589 RHS: zero
3590 and (with a cast):
3591 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
3592 OP : == or !=
3593 RHS: zero
 3594 by adding constraints for INNER_LHS INNER_OP INNER_RHS.
3595
3596 Return true if this function can fully handle the constraint; if
3597 so, add the implied constraint(s) and write true to *OUT if they
3598 are consistent with existing constraints, or write false to *OUT
 3599 if they contradict existing constraints.
3600
 3601 Return false for cases that this function doesn't know how to handle.
3602
3603 For example, if we're checking a stored conditional, we'll have
3604 something like:
3605 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
3606 OP : NE_EXPR
3607 RHS: zero
3608 which this function can turn into an add_constraint of:
3609 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
3610
3611 Similarly, optimized && and || conditionals lead to e.g.
3612 if (p && q)
3613 becoming gimple like this:
3614 _1 = p_6 == 0B;
 3615 _2 = q_8 == 0B;
 3616 _3 = _1 | _2;
3617 On the "_3 is false" branch we can have constraints of the form:
3618 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3619 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
3620 == 0
3621 which implies that both _1 and _2 are false,
3622 which this function can turn into a pair of add_constraints of
3623 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3624 and:
3625 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
3626
3627bool
3628region_model::add_constraints_from_binop (const svalue *outer_lhs,
3629 enum tree_code outer_op,
3630 const svalue *outer_rhs,
3631 bool *out,
3632 region_model_context *ctxt)
3633{
3634 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
3635 outer_lhs = cast;
3636 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
3637 if (!binop_sval)
3638 return false;
3639 if (!outer_rhs->all_zeroes_p ())
3640 return false;
3641
3642 const svalue *inner_lhs = binop_sval->get_arg0 ();
3643 enum tree_code inner_op = binop_sval->get_op ();
3644 const svalue *inner_rhs = binop_sval->get_arg1 ();
3645
3646 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
3647 return false;
3648
3649 /* We have either
3650 - "OUTER_LHS != false" (i.e. OUTER is true), or
3651 - "OUTER_LHS == false" (i.e. OUTER is false). */
3652 bool is_true = outer_op == NE_EXPR;
3653
3654 switch (inner_op)
3655 {
3656 default:
3657 return false;
3658
3659 case EQ_EXPR:
3660 case NE_EXPR:
3661 {
3662 /* ...and "(inner_lhs OP inner_rhs) == 0"
3663 then (inner_lhs OP inner_rhs) must have the same
3664 logical value as LHS. */
3665 if (!is_true)
3666 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
3667 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
3668 return true;
3669 }
3670 break;
3671
3672 case BIT_AND_EXPR:
3673 if (is_true)
3674 {
3675 /* ...and "(inner_lhs & inner_rhs) != 0"
3676 then both inner_lhs and inner_rhs must be true. */
3677 const svalue *false_sval
3678 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3679 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
3680 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
3681 *out = sat1 && sat2;
3682 return true;
3683 }
3684 return false;
3685
3686 case BIT_IOR_EXPR:
3687 if (!is_true)
3688 {
3689 /* ...and "(inner_lhs | inner_rhs) == 0"
3690 i.e. "(inner_lhs | inner_rhs)" is false
3691 then both inner_lhs and inner_rhs must be false. */
3692 const svalue *false_sval
3693 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3694 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
3695 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
3696 *out = sat1 && sat2;
3697 return true;
3698 }
3699 return false;
3700 }
3701}
3702
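/* Exposition-only example for the BIT_AND_EXPR case above (not part of
   the original source).  For
     if (p && q)
   on a path where both tests are retained, the optimizers can produce:
     _1 = p_6 != 0B;
     _2 = q_8 != 0B;
     _3 = _1 & _2;
   On the "_3 is true" branch we have "(_1 & _2) != 0", from which the
   function above adds both "p_6 != 0B" and "q_8 != 0B" as constraints.  */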
757bf1df
DM
3703/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3704 If it is consistent with existing constraints, add it, and return true.
3705 Return false if it contradicts existing constraints.
3706 Use CTXT for reporting any diagnostics associated with the accesses. */
3707
3708bool
3709region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3710 region_model_context *ctxt)
3711{
e978955d
DM
3712 /* For now, make no attempt to capture constraints on floating-point
3713 values. */
3714 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3715 return true;
3716
808f4dfe
DM
3717 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3718 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 3719
48e8a7a6
DM
3720 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3721}
3722
3723/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3724 If it is consistent with existing constraints, add it, and return true.
3725 Return false if it contradicts existing constraints.
3726 Use CTXT for reporting any diagnostics associated with the accesses. */
3727
3728bool
3729region_model::add_constraint (const svalue *lhs,
3730 enum tree_code op,
3731 const svalue *rhs,
3732 region_model_context *ctxt)
3733{
3734 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
3735
3736 /* If we already have the condition, do nothing. */
3737 if (t_cond.is_true ())
3738 return true;
3739
3740 /* Reject a constraint that would contradict existing knowledge, as
3741 unsatisfiable. */
3742 if (t_cond.is_false ())
3743 return false;
3744
48e8a7a6
DM
3745 bool out;
3746 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3747 return out;
757bf1df 3748
c4b8f373
DM
3749 /* Attempt to store the constraint. */
3750 if (!m_constraints->add_constraint (lhs, op, rhs))
3751 return false;
757bf1df
DM
3752
3753 /* Notify the context, if any. This exists so that the state machines
3754 in a program_state can be notified about the condition, and so can
3755 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3756 when synthesizing constraints as above. */
3757 if (ctxt)
3758 ctxt->on_condition (lhs, op, rhs);
3759
9a2c9579
DM
3760 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3761 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
3762 if (tree rhs_cst = rhs->maybe_get_constant ())
3763 if (op == EQ_EXPR && zerop (rhs_cst))
3764 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3765 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 3766
757bf1df
DM
3767 return true;
3768}
3769
84fb3546
DM
3770/* As above, but when returning false, if OUT is non-NULL, write a
3771 new rejected_constraint to *OUT. */
3772
3773bool
3774region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3775 region_model_context *ctxt,
3776 rejected_constraint **out)
3777{
3778 bool sat = add_constraint (lhs, op, rhs, ctxt);
3779 if (!sat && out)
8ca7fa84 3780 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
3781 return sat;
3782}
3783
757bf1df
DM
3784/* Determine what is known about the condition "LHS OP RHS" within
3785 this model.
3786 Use CTXT for reporting any diagnostics associated with the accesses. */
3787
3788tristate
3789region_model::eval_condition (tree lhs,
3790 enum tree_code op,
3791 tree rhs,
5c6546ca 3792 region_model_context *ctxt) const
757bf1df 3793{
e978955d
DM
3794 /* For now, make no attempt to model constraints on floating-point
3795 values. */
3796 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3797 return tristate::unknown ();
3798
757bf1df
DM
3799 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3800}
3801
467a4820
DM
3802/* Implementation of region_model::get_representative_path_var.
3803 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
3804 Use VISITED to prevent infinite mutual recursion with the overload for
3805 regions. */
757bf1df 3806
808f4dfe 3807path_var
467a4820
DM
3808region_model::get_representative_path_var_1 (const svalue *sval,
3809 svalue_set *visited) const
757bf1df 3810{
467a4820 3811 gcc_assert (sval);
757bf1df 3812
808f4dfe
DM
3813 /* Prevent infinite recursion. */
3814 if (visited->contains (sval))
3815 return path_var (NULL_TREE, 0);
3816 visited->add (sval);
757bf1df 3817
467a4820
DM
3818 /* Handle casts by recursion into get_representative_path_var. */
3819 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3820 {
3821 path_var result = get_representative_path_var (cast_sval, visited);
3822 tree orig_type = sval->get_type ();
3823 /* If necessary, wrap the result in a cast. */
3824 if (result.m_tree && orig_type)
3825 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3826 return result;
3827 }
3828
808f4dfe
DM
3829 auto_vec<path_var> pvs;
3830 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 3831
808f4dfe
DM
3832 if (tree cst = sval->maybe_get_constant ())
3833 pvs.safe_push (path_var (cst, 0));
757bf1df 3834
90f7c300 3835 /* Handle string literals and various other pointers. */
808f4dfe
DM
3836 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3837 {
3838 const region *reg = ptr_sval->get_pointee ();
3839 if (path_var pv = get_representative_path_var (reg, visited))
3840 return path_var (build1 (ADDR_EXPR,
467a4820 3841 sval->get_type (),
808f4dfe
DM
3842 pv.m_tree),
3843 pv.m_stack_depth);
3844 }
3845
3846 /* If we have a sub_svalue, look for ways to represent the parent. */
3847 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 3848 {
808f4dfe
DM
3849 const svalue *parent_sval = sub_sval->get_parent ();
3850 const region *subreg = sub_sval->get_subregion ();
3851 if (path_var parent_pv
3852 = get_representative_path_var (parent_sval, visited))
3853 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3854 return path_var (build3 (COMPONENT_REF,
3855 sval->get_type (),
3856 parent_pv.m_tree,
3857 field_reg->get_field (),
3858 NULL_TREE),
3859 parent_pv.m_stack_depth);
90f7c300
DM
3860 }
3861
b9365b93
DM
3862 /* Handle binops. */
3863 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3864 if (path_var lhs_pv
3865 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3866 if (path_var rhs_pv
3867 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3868 return path_var (build2 (binop_sval->get_op (),
3869 sval->get_type (),
3870 lhs_pv.m_tree, rhs_pv.m_tree),
3871 lhs_pv.m_stack_depth);
3872
808f4dfe
DM
3873 if (pvs.length () < 1)
3874 return path_var (NULL_TREE, 0);
3875
3876 pvs.qsort (readability_comparator);
3877 return pvs[0];
757bf1df
DM
3878}
3879
467a4820
DM
3880/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3881 Use VISITED to prevent infinite mutual recursion with the overload for
 3882 regions.
3883
3884 This function defers to get_representative_path_var_1 to do the work;
3885 it adds verification that get_representative_path_var_1 returned a tree
3886 of the correct type. */
3887
3888path_var
3889region_model::get_representative_path_var (const svalue *sval,
3890 svalue_set *visited) const
3891{
3892 if (sval == NULL)
3893 return path_var (NULL_TREE, 0);
3894
3895 tree orig_type = sval->get_type ();
3896
3897 path_var result = get_representative_path_var_1 (sval, visited);
3898
3899 /* Verify that the result has the same type as SVAL, if any. */
3900 if (result.m_tree && orig_type)
3901 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3902
3903 return result;
3904}
3905
3906/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3907
3908 Strip off any top-level cast, to avoid messages like
3909 double-free of '(void *)ptr'
3910 from analyzer diagnostics. */
757bf1df 3911
808f4dfe
DM
3912tree
3913region_model::get_representative_tree (const svalue *sval) const
757bf1df 3914{
808f4dfe 3915 svalue_set visited;
467a4820
DM
3916 tree expr = get_representative_path_var (sval, &visited).m_tree;
3917
3918 /* Strip off any top-level cast. */
7e3b45be
TL
3919 if (expr && TREE_CODE (expr) == NOP_EXPR)
3920 expr = TREE_OPERAND (expr, 0);
3921
3922 return fixup_tree_for_diagnostic (expr);
3923}
3924
3925tree
3926region_model::get_representative_tree (const region *reg) const
3927{
3928 svalue_set visited;
3929 tree expr = get_representative_path_var (reg, &visited).m_tree;
3930
3931 /* Strip off any top-level cast. */
467a4820 3932 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 3933 expr = TREE_OPERAND (expr, 0);
467a4820 3934
e4bb1bd6 3935 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
3936}
3937
467a4820
DM
3938/* Implementation of region_model::get_representative_path_var.
3939
3940 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
3941 the NULL path_var.
3942 For example, a region for a field of a local would be a path_var
3943 wrapping a COMPONENT_REF.
3944 Use VISITED to prevent infinite mutual recursion with the overload for
3945 svalues. */
757bf1df 3946
808f4dfe 3947path_var
467a4820
DM
3948region_model::get_representative_path_var_1 (const region *reg,
3949 svalue_set *visited) const
808f4dfe
DM
3950{
3951 switch (reg->get_kind ())
757bf1df 3952 {
808f4dfe
DM
3953 default:
3954 gcc_unreachable ();
e516294a 3955
808f4dfe
DM
3956 case RK_FRAME:
3957 case RK_GLOBALS:
3958 case RK_CODE:
3959 case RK_HEAP:
3960 case RK_STACK:
358dab90 3961 case RK_THREAD_LOCAL:
808f4dfe
DM
3962 case RK_ROOT:
3963 /* Regions that represent memory spaces are not expressible as trees. */
3964 return path_var (NULL_TREE, 0);
757bf1df 3965
808f4dfe 3966 case RK_FUNCTION:
884d9141 3967 {
808f4dfe
DM
3968 const function_region *function_reg
3969 = as_a <const function_region *> (reg);
3970 return path_var (function_reg->get_fndecl (), 0);
884d9141 3971 }
808f4dfe 3972 case RK_LABEL:
9e78634c
DM
3973 {
3974 const label_region *label_reg = as_a <const label_region *> (reg);
3975 return path_var (label_reg->get_label (), 0);
3976 }
90f7c300 3977
808f4dfe
DM
3978 case RK_SYMBOLIC:
3979 {
3980 const symbolic_region *symbolic_reg
3981 = as_a <const symbolic_region *> (reg);
3982 const svalue *pointer = symbolic_reg->get_pointer ();
3983 path_var pointer_pv = get_representative_path_var (pointer, visited);
3984 if (!pointer_pv)
3985 return path_var (NULL_TREE, 0);
3986 tree offset = build_int_cst (pointer->get_type (), 0);
3987 return path_var (build2 (MEM_REF,
3988 reg->get_type (),
3989 pointer_pv.m_tree,
3990 offset),
3991 pointer_pv.m_stack_depth);
3992 }
3993 case RK_DECL:
3994 {
3995 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3996 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3997 }
3998 case RK_FIELD:
3999 {
4000 const field_region *field_reg = as_a <const field_region *> (reg);
4001 path_var parent_pv
4002 = get_representative_path_var (reg->get_parent_region (), visited);
4003 if (!parent_pv)
4004 return path_var (NULL_TREE, 0);
4005 return path_var (build3 (COMPONENT_REF,
4006 reg->get_type (),
4007 parent_pv.m_tree,
4008 field_reg->get_field (),
4009 NULL_TREE),
4010 parent_pv.m_stack_depth);
4011 }
757bf1df 4012
808f4dfe
DM
4013 case RK_ELEMENT:
4014 {
4015 const element_region *element_reg
4016 = as_a <const element_region *> (reg);
4017 path_var parent_pv
4018 = get_representative_path_var (reg->get_parent_region (), visited);
4019 if (!parent_pv)
4020 return path_var (NULL_TREE, 0);
4021 path_var index_pv
4022 = get_representative_path_var (element_reg->get_index (), visited);
4023 if (!index_pv)
4024 return path_var (NULL_TREE, 0);
4025 return path_var (build4 (ARRAY_REF,
4026 reg->get_type (),
4027 parent_pv.m_tree, index_pv.m_tree,
4028 NULL_TREE, NULL_TREE),
4029 parent_pv.m_stack_depth);
4030 }
757bf1df 4031
808f4dfe 4032 case RK_OFFSET:
757bf1df 4033 {
808f4dfe
DM
4034 const offset_region *offset_reg
4035 = as_a <const offset_region *> (reg);
4036 path_var parent_pv
4037 = get_representative_path_var (reg->get_parent_region (), visited);
4038 if (!parent_pv)
4039 return path_var (NULL_TREE, 0);
4040 path_var offset_pv
4041 = get_representative_path_var (offset_reg->get_byte_offset (),
4042 visited);
29f5db8e 4043 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 4044 return path_var (NULL_TREE, 0);
29f5db8e
DM
4045 tree addr_parent = build1 (ADDR_EXPR,
4046 build_pointer_type (reg->get_type ()),
4047 parent_pv.m_tree);
808f4dfe
DM
4048 return path_var (build2 (MEM_REF,
4049 reg->get_type (),
29f5db8e 4050 addr_parent, offset_pv.m_tree),
808f4dfe 4051 parent_pv.m_stack_depth);
757bf1df 4052 }
757bf1df 4053
e61ffa20
DM
4054 case RK_SIZED:
4055 return path_var (NULL_TREE, 0);
4056
808f4dfe
DM
4057 case RK_CAST:
4058 {
4059 path_var parent_pv
4060 = get_representative_path_var (reg->get_parent_region (), visited);
4061 if (!parent_pv)
4062 return path_var (NULL_TREE, 0);
4063 return path_var (build1 (NOP_EXPR,
4064 reg->get_type (),
4065 parent_pv.m_tree),
4066 parent_pv.m_stack_depth);
4067 }
757bf1df 4068
808f4dfe
DM
4069 case RK_HEAP_ALLOCATED:
4070 case RK_ALLOCA:
4071 /* No good way to express heap-allocated/alloca regions as trees. */
4072 return path_var (NULL_TREE, 0);
757bf1df 4073
808f4dfe
DM
4074 case RK_STRING:
4075 {
4076 const string_region *string_reg = as_a <const string_region *> (reg);
4077 return path_var (string_reg->get_string_cst (), 0);
4078 }
757bf1df 4079
2402dc6b 4080 case RK_VAR_ARG:
358dab90 4081 case RK_ERRNO:
808f4dfe
DM
4082 case RK_UNKNOWN:
4083 return path_var (NULL_TREE, 0);
4084 }
757bf1df
DM
4085}
4086
467a4820
DM
4087/* Attempt to return a path_var that represents REG, or return
4088 the NULL path_var.
4089 For example, a region for a field of a local would be a path_var
4090 wrapping a COMPONENT_REF.
4091 Use VISITED to prevent infinite mutual recursion with the overload for
4092 svalues.
4093
4094 This function defers to get_representative_path_var_1 to do the work;
4095 it adds verification that get_representative_path_var_1 returned a tree
4096 of the correct type. */
4097
4098path_var
4099region_model::get_representative_path_var (const region *reg,
4100 svalue_set *visited) const
4101{
4102 path_var result = get_representative_path_var_1 (reg, visited);
4103
4104 /* Verify that the result has the same type as REG, if any. */
4105 if (result.m_tree && reg->get_type ())
4106 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4107
4108 return result;
4109}
4110
757bf1df
DM
4111/* Update this model for any phis in SNODE, assuming we came from
4112 LAST_CFG_SUPEREDGE. */
4113
4114void
4115region_model::update_for_phis (const supernode *snode,
4116 const cfg_superedge *last_cfg_superedge,
4117 region_model_context *ctxt)
4118{
4119 gcc_assert (last_cfg_superedge);
4120
e0a7a675
DM
4121 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4122 are effectively handled simultaneously. */
4123 const region_model old_state (*this);
4124
757bf1df
DM
4125 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4126 !gsi_end_p (gpi); gsi_next (&gpi))
4127 {
4128 gphi *phi = gpi.phi ();
4129
4130 tree src = last_cfg_superedge->get_phi_arg (phi);
4131 tree lhs = gimple_phi_result (phi);
4132
e0a7a675
DM
4133 /* Update next_state based on phi and old_state. */
4134 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
4135 }
4136}
4137
4138/* Attempt to update this model for taking EDGE (where the last statement
4139 was LAST_STMT), returning true if the edge can be taken, false
4140 otherwise.
84fb3546
DM
4141 When returning false, if OUT is non-NULL, write a new rejected_constraint
4142 to it.
757bf1df
DM
4143
4144 For CFG superedges where LAST_STMT is a conditional or a switch
4145 statement, attempt to add the relevant conditions for EDGE to this
4146 model, returning true if they are feasible, or false if they are
4147 impossible.
4148
4149 For call superedges, push frame information and store arguments
4150 into parameters.
4151
4152 For return superedges, pop frame information and store return
4153 values into any lhs.
4154
4155 Rejection of call/return superedges happens elsewhere, in
4156 program_point::on_edge (i.e. based on program point, rather
4157 than program state). */
4158
4159bool
4160region_model::maybe_update_for_edge (const superedge &edge,
4161 const gimple *last_stmt,
84fb3546
DM
4162 region_model_context *ctxt,
4163 rejected_constraint **out)
757bf1df
DM
4164{
4165 /* Handle frame updates for interprocedural edges. */
4166 switch (edge.m_kind)
4167 {
4168 default:
4169 break;
4170
4171 case SUPEREDGE_CALL:
4172 {
4173 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4174 update_for_call_superedge (*call_edge, ctxt);
4175 }
4176 break;
4177
4178 case SUPEREDGE_RETURN:
4179 {
4180 const return_superedge *return_edge
4181 = as_a <const return_superedge *> (&edge);
4182 update_for_return_superedge (*return_edge, ctxt);
4183 }
4184 break;
4185
4186 case SUPEREDGE_INTRAPROCEDURAL_CALL:
bfca9505
DM
4187 /* This is a no-op for call summaries; we should already
4188 have handled the effect of the call summary at the call stmt. */
757bf1df
DM
4189 break;
4190 }
4191
4192 if (last_stmt == NULL)
4193 return true;
4194
4195 /* Apply any constraints for conditionals/switch statements. */
4196
4197 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4198 {
4199 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 4200 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
4201 }
4202
4203 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4204 {
4205 const switch_cfg_superedge *switch_sedge
4206 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
4207 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4208 ctxt, out);
757bf1df
DM
4209 }
4210
1690a839
DM
4211 /* Apply any constraints due to an exception being thrown. */
4212 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4213 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 4214 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 4215
757bf1df
DM
4216 return true;
4217}
4218
 4219/* Push a new frame_region onto the stack region.
4220 Populate the frame_region with child regions for the function call's
4221 parameters, using values from the arguments at the callsite in the
4222 caller's frame. */
4223
4224void
aef703cf 4225region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
4226 region_model_context *ctxt,
4227 function *callee)
757bf1df 4228{
808f4dfe 4229 /* Build a vec of argument svalues, using the current top
757bf1df 4230 frame for resolving tree expressions. */
808f4dfe 4231 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
4232
4233 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4234 {
4235 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 4236 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
4237 }
4238
e92d0ff6
AS
 4239 if (!callee)
 4240 {
 4241 /* Get the function * from the gcall. */
 4242 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
4243 callee = DECL_STRUCT_FUNCTION (fn_decl);
4244 }
4245
4246 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
4247}
4248
a96f1c38
DM
4249/* Pop the top-most frame_region from the stack, and copy the return
4250 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 4251 the call (if any). */
aef703cf 4252
757bf1df 4253void
aef703cf
AS
4254region_model::update_for_return_gcall (const gcall *call_stmt,
4255 region_model_context *ctxt)
757bf1df 4256{
4cebae09
DM
4257 /* Get the lvalue for the result of the call, passing it to pop_frame,
4258 so that pop_frame can determine the region with respect to the
4259 *caller* frame. */
757bf1df 4260 tree lhs = gimple_call_lhs (call_stmt);
4cebae09 4261 pop_frame (lhs, NULL, ctxt);
757bf1df
DM
4262}
4263
aef703cf
AS
4264/* Extract calling information from the superedge and update the model for the
 4265 call. */
4266
4267void
4268region_model::update_for_call_superedge (const call_superedge &call_edge,
4269 region_model_context *ctxt)
4270{
4271 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 4272 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
4273}
4274
4275/* Extract calling information from the return superedge and update the model
 4276 for the returning call. */
4277
4278void
4279region_model::update_for_return_superedge (const return_superedge &return_edge,
4280 region_model_context *ctxt)
4281{
4282 const gcall *call_stmt = return_edge.get_call_stmt ();
4283 update_for_return_gcall (call_stmt, ctxt);
4284}
4285
bfca9505
DM
 4286/* Attempt to use R to replay SUMMARY into this object.
4287 Return true if it is possible. */
757bf1df 4288
bfca9505
DM
4289bool
4290region_model::replay_call_summary (call_summary_replay &r,
4291 const region_model &summary)
757bf1df 4292{
bfca9505
DM
4293 gcc_assert (summary.get_stack_depth () == 1);
4294
4295 m_store.replay_call_summary (r, summary.m_store);
757bf1df 4296
bfca9505
DM
4297 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4298 return false;
4299
4300 for (auto kv : summary.m_dynamic_extents)
4301 {
4302 const region *summary_reg = kv.first;
4303 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4304 if (!caller_reg)
4305 continue;
4306 const svalue *summary_sval = kv.second;
4307 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4308 if (!caller_sval)
4309 continue;
4310 m_dynamic_extents.put (caller_reg, caller_sval);
4311 }
4312
4313 return true;
757bf1df
DM
4314}
4315
4316/* Given a true or false edge guarded by conditional statement COND_STMT,
4317 determine appropriate constraints for the edge to be taken.
4318
4319 If they are feasible, add the constraints and return true.
4320
4321 Return false if the constraints contradict existing knowledge
84fb3546
DM
4322 (and so the edge should not be taken).
4323 When returning false, if OUT is non-NULL, write a new rejected_constraint
4324 to it. */
757bf1df
DM
4325
4326bool
4327region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4328 const gcond *cond_stmt,
84fb3546
DM
4329 region_model_context *ctxt,
4330 rejected_constraint **out)
757bf1df
DM
4331{
4332 ::edge cfg_edge = sedge.get_cfg_edge ();
4333 gcc_assert (cfg_edge != NULL);
4334 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4335
4336 enum tree_code op = gimple_cond_code (cond_stmt);
4337 tree lhs = gimple_cond_lhs (cond_stmt);
4338 tree rhs = gimple_cond_rhs (cond_stmt);
4339 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4340 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 4341 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
4342}
4343
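/* Exposition-only example (not part of the original source): for
     if (x < 10)
   the EDGE_TRUE_VALUE edge adds the constraint "x < 10", whereas the
   EDGE_FALSE_VALUE edge first inverts the comparison code via
   invert_tree_comparison, adding "x >= 10" instead.  */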
4344/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4345 for the edge to be taken.
4346
4347 If they are feasible, add the constraints and return true.
4348
4349 Return false if the constraints contradict existing knowledge
84fb3546
DM
4350 (and so the edge should not be taken).
4351 When returning false, if OUT is non-NULL, write a new rejected_constraint
4352 to it. */
757bf1df
DM
4353
4354bool
4355region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4356 const gswitch *switch_stmt,
84fb3546
DM
4357 region_model_context *ctxt,
4358 rejected_constraint **out)
757bf1df 4359{
8ca7fa84
DM
4360 bounded_ranges_manager *ranges_mgr = get_range_manager ();
4361 const bounded_ranges *all_cases_ranges
4362 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
757bf1df 4363 tree index = gimple_switch_index (switch_stmt);
8ca7fa84
DM
4364 const svalue *index_sval = get_rvalue (index, ctxt);
4365 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
4366 if (!sat && out)
4367 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
2c044ff1
DM
4368 if (sat && ctxt && !all_cases_ranges->empty_p ())
4369 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
8ca7fa84 4370 return sat;
757bf1df
DM
4371}
4372
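/* Exposition-only example (not part of the original source): for
     switch (i) { case 1 ... 3: ...; default: ...; }
   the superedge for "case 1 ... 3" carries the bounded range [1, 3]
   for i, while the default edge carries the complementary ranges
   (i <= 0 or i >= 4); add_bounded_ranges records whichever applies
   to the edge being taken.  */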
1690a839
DM
4373/* Apply any constraints due to an exception being thrown at LAST_STMT.
4374
4375 If they are feasible, add the constraints and return true.
4376
4377 Return false if the constraints contradict existing knowledge
84fb3546
DM
4378 (and so the edge should not be taken).
4379 When returning false, if OUT is non-NULL, write a new rejected_constraint
4380 to it. */
1690a839
DM
4381
4382bool
4383region_model::apply_constraints_for_exception (const gimple *last_stmt,
84fb3546
DM
4384 region_model_context *ctxt,
4385 rejected_constraint **out)
1690a839
DM
4386{
4387 gcc_assert (last_stmt);
4388 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4389 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4390 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4391 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4392 {
4393 /* We have an exception thrown from operator new.
4394 Add a constraint that the result was NULL, to avoid a false
4395 leak report due to the result being lost when following
4396 the EH edge. */
4397 if (tree lhs = gimple_call_lhs (call))
84fb3546 4398 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
4399 return true;
4400 }
4401 return true;
4402}
4403
808f4dfe
DM
4404/* For use with push_frame when handling a top-level call within the analysis.
4405 PARAM has a defined but unknown initial value.
4406 Anything it points to has escaped, since the calling context "knows"
4407 the pointer, and thus calls to unknown functions could read/write into
dcfc7ac9
DM
4408 the region.
4409 If NONNULL is true, then assume that PARAM must be non-NULL. */
757bf1df
DM
4410
4411void
808f4dfe 4412region_model::on_top_level_param (tree param,
dcfc7ac9
DM
4413 bool nonnull,
4414 region_model_context *ctxt)
757bf1df 4415{
808f4dfe 4416 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 4417 {
808f4dfe
DM
4418 const region *param_reg = get_lvalue (param, ctxt);
4419 const svalue *init_ptr_sval
4420 = m_mgr->get_or_create_initial_value (param_reg);
4421 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4422 m_store.mark_as_escaped (pointee_reg);
dcfc7ac9
DM
4423 if (nonnull)
4424 {
4425 const svalue *null_ptr_sval
4426 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
4427 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
4428 }
5eae0ac7 4429 }
757bf1df
DM
4430}
4431
808f4dfe
DM
4432/* Update this region_model to reflect pushing a frame onto the stack
4433 for a call to FUN.
757bf1df 4434
808f4dfe
DM
4435 If ARG_SVALS is non-NULL, use it to populate the parameters
4436 in the new frame.
4437 Otherwise, the params have their initial_svalues.
757bf1df 4438
808f4dfe 4439 Return the frame_region for the new frame. */
757bf1df 4440
808f4dfe
DM
4441const region *
4442region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
4443 region_model_context *ctxt)
757bf1df 4444{
808f4dfe
DM
4445 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
4446 if (arg_svals)
757bf1df 4447 {
808f4dfe
DM
4448 /* Arguments supplied from a caller frame. */
4449 tree fndecl = fun->decl;
4450 unsigned idx = 0;
4451 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4452 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 4453 {
808f4dfe
DM
4454 /* If there's a mismatching declaration, the call stmt might
4455 not have enough args. Handle this case by leaving the
4456 rest of the params as uninitialized. */
4457 if (idx >= arg_svals->length ())
4458 break;
294b6da2
DM
4459 tree parm_lval = iter_parm;
4460 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4461 parm_lval = parm_default_ssa;
4462 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 4463 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 4464 set_value (parm_reg, arg_sval, ctxt);
757bf1df 4465 }
2402dc6b
DM
4466
4467 /* Handle any variadic args. */
4468 unsigned va_arg_idx = 0;
4469 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
4470 {
4471 const svalue *arg_sval = (*arg_svals)[idx];
4472 const region *var_arg_reg
4473 = m_mgr->get_var_arg_region (m_current_frame,
4474 va_arg_idx);
4475 set_value (var_arg_reg, arg_sval, ctxt);
4476 }
757bf1df 4477 }
808f4dfe 4478 else
757bf1df 4479 {
808f4dfe
DM
4480 /* Otherwise we have a top-level call within the analysis. The params
4481 have defined but unknown initial values.
4482 Anything they point to has escaped. */
4483 tree fndecl = fun->decl;
dcfc7ac9
DM
4484
4485 /* Handle "__attribute__((nonnull))". */
4486 tree fntype = TREE_TYPE (fndecl);
4487 bitmap nonnull_args = get_nonnull_args (fntype);
4488
4489 unsigned parm_idx = 0;
808f4dfe
DM
4490 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4491 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 4492 {
dcfc7ac9
DM
4493 bool non_null = (nonnull_args
4494 ? (bitmap_empty_p (nonnull_args)
4495 || bitmap_bit_p (nonnull_args, parm_idx))
4496 : false);
294b6da2 4497 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
dcfc7ac9 4498 on_top_level_param (parm_default_ssa, non_null, ctxt);
294b6da2 4499 else
dcfc7ac9
DM
4500 on_top_level_param (iter_parm, non_null, ctxt);
4501 parm_idx++;
757bf1df 4502 }
dcfc7ac9
DM
4503
4504 BITMAP_FREE (nonnull_args);
757bf1df 4505 }
757bf1df 4506
808f4dfe 4507 return m_current_frame;
757bf1df
DM
4508}
4509
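/* Exposition-only example of the no-arguments path above (not part of
   the original source).  When analyzing
     __attribute__((nonnull (1)))
     void fn (int *p, int *q);
   as a top-level entrypoint, push_frame is called with
   ARG_SVALS == NULL: the pointees of both parameters are marked as
   having escaped, and on_top_level_param additionally adds the
   constraint "p != NULL" for the first parameter, but not for q.  */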
808f4dfe
DM
4510/* Get the function of the top-most frame in this region_model's stack.
4511 There must be such a frame. */
757bf1df 4512
808f4dfe
DM
4513function *
4514region_model::get_current_function () const
757bf1df 4515{
808f4dfe
DM
4516 const frame_region *frame = get_current_frame ();
4517 gcc_assert (frame);
4518 return frame->get_function ();
757bf1df
DM
4519}
4520
808f4dfe 4521/* Pop the topmost frame_region from this region_model's stack;
757bf1df 4522
4cebae09
DM
4523 If RESULT_LVALUE is non-null, copy any return value from the frame
4524 into the corresponding region (evaluated with respect to the *caller*
4525 frame, rather than the called frame).
808f4dfe
DM
4526 If OUT_RESULT is non-null, copy any return value from the frame
4527 into *OUT_RESULT.
757bf1df 4528
808f4dfe
DM
4529 Purge the frame region and all its descendent regions.
4530 Convert any pointers that point into such regions into
4531 POISON_KIND_POPPED_STACK svalues. */
757bf1df 4532
808f4dfe 4533void
4cebae09 4534region_model::pop_frame (tree result_lvalue,
808f4dfe
DM
4535 const svalue **out_result,
4536 region_model_context *ctxt)
4537{
4538 gcc_assert (m_current_frame);
757bf1df 4539
808f4dfe 4540 const frame_region *frame_reg = m_current_frame;
5c6546ca
DM
4541
4542 /* Notify state machines. */
4543 if (ctxt)
4544 ctxt->on_pop_frame (frame_reg);
4545
4546 /* Evaluate the result, within the callee frame. */
808f4dfe
DM
4547 tree fndecl = m_current_frame->get_function ()->decl;
4548 tree result = DECL_RESULT (fndecl);
4cebae09 4549 const svalue *retval = NULL;
808f4dfe
DM
4550 if (result && TREE_TYPE (result) != void_type_node)
4551 {
4cebae09 4552 retval = get_rvalue (result, ctxt);
808f4dfe 4553 if (out_result)
13ad6d9f 4554 *out_result = retval;
808f4dfe 4555 }
757bf1df 4556
808f4dfe
DM
4557 /* Pop the frame. */
4558 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 4559
4cebae09
DM
4560 if (result_lvalue && retval)
4561 {
4562 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4563 the frame, but before poisoning pointers into the old frame. */
4564 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4565 set_value (result_dst_reg, retval, ctxt);
4566 }
4567
 808f4dfe 4568 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
757bf1df
DM
4569}
4570
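/* Exposition-only sketch (not part of the original source) of the
   push/pop pairing used when simulating an interprocedural call;
   "model", "fun", "arg_svals", "lhs" and "ctxt" are hypothetical.  */

static void
sketch_simulate_call (region_model &model, function *fun,
		      const vec<const svalue *> &arg_svals,
		      tree lhs, region_model_context *ctxt)
{
  model.push_frame (fun, &arg_svals, ctxt);
  /* ... simulate the callee's body here ... */
  /* Copies the return value into LHS (resolved in the *caller* frame,
     since the frame is popped first), then poisons any pointers into
     the popped frame.  */
  model.pop_frame (lhs, NULL, ctxt);
}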
808f4dfe 4571/* Get the number of frames in this region_model's stack. */
757bf1df 4572
808f4dfe
DM
4573int
4574region_model::get_stack_depth () const
757bf1df 4575{
808f4dfe
DM
4576 const frame_region *frame = get_current_frame ();
4577 if (frame)
4578 return frame->get_stack_depth ();
4579 else
4580 return 0;
757bf1df
DM
4581}
4582
808f4dfe
DM
4583/* Get the frame_region with the given index within the stack.
4584 The frame_region must exist. */
757bf1df 4585
808f4dfe
DM
4586const frame_region *
4587region_model::get_frame_at_index (int index) const
757bf1df 4588{
808f4dfe
DM
4589 const frame_region *frame = get_current_frame ();
4590 gcc_assert (frame);
4591 gcc_assert (index >= 0);
4592 gcc_assert (index <= frame->get_index ());
4593 while (index != frame->get_index ())
4594 {
4595 frame = frame->get_calling_frame ();
4596 gcc_assert (frame);
4597 }
4598 return frame;
757bf1df
DM
4599}
4600
808f4dfe
DM
4601/* Unbind svalues for any regions in REG and below.
4602 Find any pointers to such regions; convert them to
9a2c9579
DM
4603 poisoned values of kind PKIND.
4604 Also purge any dynamic extents. */
757bf1df 4605
808f4dfe
DM
4606void
4607region_model::unbind_region_and_descendents (const region *reg,
4608 enum poison_kind pkind)
757bf1df 4609{
808f4dfe
DM
4610 /* Gather a set of base regions to be unbound. */
4611 hash_set<const region *> base_regs;
4612 for (store::cluster_map_t::iterator iter = m_store.begin ();
4613 iter != m_store.end (); ++iter)
757bf1df 4614 {
808f4dfe
DM
4615 const region *iter_base_reg = (*iter).first;
4616 if (iter_base_reg->descendent_of_p (reg))
4617 base_regs.add (iter_base_reg);
757bf1df 4618 }
808f4dfe
DM
4619 for (hash_set<const region *>::iterator iter = base_regs.begin ();
4620 iter != base_regs.end (); ++iter)
4621 m_store.purge_cluster (*iter);
757bf1df 4622
808f4dfe
DM
4623 /* Find any pointers to REG or its descendents; convert to poisoned. */
4624 poison_any_pointers_to_descendents (reg, pkind);
9a2c9579
DM
4625
4626 /* Purge dynamic extents of any base regions in REG and below
4627 (e.g. VLAs and alloca stack regions). */
4628 for (auto iter : m_dynamic_extents)
4629 {
4630 const region *iter_reg = iter.first;
4631 if (iter_reg->descendent_of_p (reg))
4632 unset_dynamic_extents (iter_reg);
4633 }
757bf1df
DM
4634}
4635
808f4dfe
DM
4636/* Implementation of BindingVisitor.
4637 Update the bound svalues for regions below REG to use poisoned
4638 values instead. */
757bf1df 4639
808f4dfe 4640struct bad_pointer_finder
757bf1df 4641{
808f4dfe
DM
4642 bad_pointer_finder (const region *reg, enum poison_kind pkind,
4643 region_model_manager *mgr)
4644 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
4645 {}
757bf1df 4646
808f4dfe
DM
4647 void on_binding (const binding_key *, const svalue *&sval)
4648 {
4649 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4650 {
4651 const region *ptr_dst = ptr_sval->get_pointee ();
4652 /* Poison ptrs to descendents of REG, but not to REG itself,
4653 otherwise double-free detection doesn't work (since sm-state
4654 for "free" is stored on the original ptr svalue). */
4655 if (ptr_dst->descendent_of_p (m_reg)
4656 && ptr_dst != m_reg)
4657 {
4658 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4659 sval->get_type ());
4660 ++m_count;
4661 }
4662 }
4663 }
757bf1df 4664
808f4dfe
DM
4665 const region *m_reg;
4666 enum poison_kind m_pkind;
4667 region_model_manager *const m_mgr;
4668 int m_count;
4669};
757bf1df 4670
808f4dfe
DM
4671/* Find any pointers to REG or its descendents; convert them to
4672 poisoned values of kind PKIND.
4673 Return the number of pointers that were poisoned. */
757bf1df 4674
808f4dfe
DM
4675int
4676region_model::poison_any_pointers_to_descendents (const region *reg,
4677 enum poison_kind pkind)
4678{
4679 bad_pointer_finder bv (reg, pkind, m_mgr);
4680 m_store.for_each_binding (bv);
4681 return bv.m_count;
757bf1df
DM
4682}
4683
808f4dfe
DM
4684/* Attempt to merge THIS with OTHER_MODEL, writing the result
4685 to OUT_MODEL. Use POINT to distinguish values created as a
4686 result of merging. */
757bf1df 4687
808f4dfe
DM
4688bool
4689region_model::can_merge_with_p (const region_model &other_model,
4690 const program_point &point,
f573d351
DM
4691 region_model *out_model,
4692 const extrinsic_state *ext_state,
4693 const program_state *state_a,
4694 const program_state *state_b) const
757bf1df 4695{
808f4dfe
DM
4696 gcc_assert (out_model);
4697 gcc_assert (m_mgr == other_model.m_mgr);
4698 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 4699
808f4dfe
DM
4700 if (m_current_frame != other_model.m_current_frame)
4701 return false;
4702 out_model->m_current_frame = m_current_frame;
757bf1df 4703
f573d351
DM
4704 model_merger m (this, &other_model, point, out_model,
4705 ext_state, state_a, state_b);
757bf1df 4706
808f4dfe
DM
4707 if (!store::can_merge_p (&m_store, &other_model.m_store,
4708 &out_model->m_store, m_mgr->get_store_manager (),
4709 &m))
4710 return false;
4711
9a2c9579
DM
4712 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4713 &out_model->m_dynamic_extents))
4714 return false;
4715
808f4dfe
DM
4716 /* Merge constraints. */
4717 constraint_manager::merge (*m_constraints,
4718 *other_model.m_constraints,
c710051a 4719 out_model->m_constraints);
757bf1df 4720
808f4dfe 4721 return true;
757bf1df
DM
4722}
4723
4724/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4725 otherwise. */
4726
4727tree
4728region_model::get_fndecl_for_call (const gcall *call,
4729 region_model_context *ctxt)
4730{
4731 tree fn_ptr = gimple_call_fn (call);
4732 if (fn_ptr == NULL_TREE)
4733 return NULL_TREE;
808f4dfe
DM
4734 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4735 if (const region_svalue *fn_ptr_ptr
4736 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 4737 {
808f4dfe
DM
4738 const region *reg = fn_ptr_ptr->get_pointee ();
4739 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 4740 {
808f4dfe 4741 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
4742 cgraph_node *node = cgraph_node::get (fn_decl);
4743 if (!node)
4744 return NULL_TREE;
4745 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
4746 if (ultimate_node)
4747 return ultimate_node->decl;
757bf1df
DM
4748 }
4749 }
4750
4751 return NULL_TREE;
4752}
4753
808f4dfe 4754/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 4755
faacafd2 4756struct append_regions_cb_data
757bf1df 4757{
808f4dfe
DM
4758 const region_model *model;
4759 auto_vec<const decl_region *> *out;
4760};
757bf1df 4761
faacafd2 4762/* Populate *OUT with all decl_regions in the current
808f4dfe 4763 frame that have clusters within the store. */
757bf1df
DM
4764
4765void
808f4dfe 4766region_model::
faacafd2 4767get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
757bf1df 4768{
faacafd2 4769 append_regions_cb_data data;
808f4dfe
DM
4770 data.model = this;
4771 data.out = out;
faacafd2 4772 m_store.for_each_cluster (append_regions_cb, &data);
757bf1df
DM
4773}
4774
faacafd2 4775/* Implementation detail of get_regions_for_current_frame. */
757bf1df 4776
808f4dfe 4777void
faacafd2
DM
4778region_model::append_regions_cb (const region *base_reg,
4779 append_regions_cb_data *cb_data)
757bf1df 4780{
808f4dfe
DM
4781 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4782 return;
4783 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
faacafd2 4784 cb_data->out->safe_push (decl_reg);
757bf1df
DM
4785}
4786
c83e9731
TL
4787
4788/* Abstract class for diagnostics related to the use of
4789 floating-point arithmetic where precision is needed. */
4790
4791class imprecise_floating_point_arithmetic : public pending_diagnostic
4792{
4793public:
4794 int get_controlling_option () const final override
4795 {
4796 return OPT_Wanalyzer_imprecise_fp_arithmetic;
4797 }
4798};
4799
4800/* Concrete diagnostic to complain about uses of floating-point arithmetic
4801 in the size argument of malloc etc. */
4802
4803class float_as_size_arg : public imprecise_floating_point_arithmetic
4804{
4805public:
4806 float_as_size_arg (tree arg) : m_arg (arg)
4807 {}
4808
4809 const char *get_kind () const final override
4810 {
4811 return "float_as_size_arg_diagnostic";
4812 }
4813
ac9230fb 4814 bool subclass_equal_p (const pending_diagnostic &other) const final override
c83e9731
TL
4815 {
4816 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
4817 }
4818
4819 bool emit (rich_location *rich_loc) final override
4820 {
4821 diagnostic_metadata m;
4822 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
4823 "use of floating-point arithmetic here might"
4824 " yield unexpected results");
4825 if (warned)
4826 inform (rich_loc->get_loc (), "only use operands of an integer type"
4827 " inside the size argument");
4828 return warned;
4829 }
4830
4831 label_text describe_final_event (const evdesc::final_event &ev) final
4832 override
4833 {
4834 if (m_arg)
4835 return ev.formatted_print ("operand %qE is of type %qT",
4836 m_arg, TREE_TYPE (m_arg));
4837 return ev.formatted_print ("at least one operand of the size argument is"
4838 " of a floating-point type");
4839 }
4840
4841private:
4842 tree m_arg;
4843};
4844
4845/* Visitor to find uses of floating-point variables/constants in an svalue. */
4846
4847class contains_floating_point_visitor : public visitor
4848{
4849public:
4850 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
4851 {
4852 root_sval->accept (this);
4853 }
4854
4855 const svalue *get_svalue_to_report ()
4856 {
4857 return m_result;
4858 }
4859
4860 void visit_constant_svalue (const constant_svalue *sval) final override
4861 {
 4862 /* At the point the analyzer runs, constant integer operands in a
 4863 floating-point expression have already been implicitly converted to
 4864 floating point. Thus, we prefer to report non-constants so that the
 4865 diagnostic always reports a floating-point operand. */
4866 tree type = sval->get_type ();
4867 if (type && FLOAT_TYPE_P (type) && !m_result)
4868 m_result = sval;
4869 }
4870
4871 void visit_conjured_svalue (const conjured_svalue *sval) final override
4872 {
4873 tree type = sval->get_type ();
4874 if (type && FLOAT_TYPE_P (type))
4875 m_result = sval;
4876 }
4877
4878 void visit_initial_svalue (const initial_svalue *sval) final override
4879 {
4880 tree type = sval->get_type ();
4881 if (type && FLOAT_TYPE_P (type))
4882 m_result = sval;
4883 }
4884
4885private:
4886 /* Non-null if at least one floating-point operand was found. */
4887 const svalue *m_result;
4888};
4889
4890/* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
4891
4892void
4893region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
4894 region_model_context *ctxt) const
4895{
4896 gcc_assert (ctxt);
4897
4898 contains_floating_point_visitor v (size_in_bytes);
4899 if (const svalue *float_sval = v.get_svalue_to_report ())
4900 {
4901 tree diag_arg = get_representative_tree (float_sval);
6341f14e 4902 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
c83e9731
TL
4903 }
4904}
4905
ce917b04
DM
4906/* Return a region describing a heap-allocated block of memory.
4907 Use CTXT to complain about tainted sizes.
4908
4909 Reuse an existing heap_allocated_region if it's not being referenced by
4910 this region_model; otherwise create a new one. */
757bf1df 4911
808f4dfe 4912const region *
ce917b04
DM
4913region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
4914 region_model_context *ctxt)
4915{
4916 /* Determine which regions are referenced in this region_model, so that
4917 we can reuse an existing heap_allocated_region if it's not in use on
4918 this path. */
7dc0ecaf 4919 auto_bitmap base_regs_in_use;
ce917b04
DM
4920 get_referenced_base_regions (base_regs_in_use);
4921 const region *reg
4922 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
688fc162
DM
4923 if (size_in_bytes)
4924 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
4925 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 4926 return reg;
757bf1df
DM
4927}
4928
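/* Exposition-only sketch of why the reuse above matters (not part of
   the original source).  On a path such as
     p = malloc (4); free (p); p = malloc (4);
   no binding keeps the first heap_allocated_region referenced after
   the free, so it is absent from BASE_REGS_IN_USE and can be handed
   out again for the second call, keeping the number of regions
   bounded on malloc/free loops.  */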
ce917b04
DM
4929/* Populate OUT_IDS with the set of IDs of those base regions which are
4930 reachable in this region_model. */
4931
4932void
7dc0ecaf 4933region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
ce917b04
DM
4934{
4935 reachable_regions reachable_regs (const_cast<region_model *> (this));
4936 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
4937 &reachable_regs);
4938 /* Get regions for locals that have explicitly bound values. */
4939 for (store::cluster_map_t::iterator iter = m_store.begin ();
4940 iter != m_store.end (); ++iter)
4941 {
4942 const region *base_reg = (*iter).first;
4943 if (const region *parent = base_reg->get_parent_region ())
4944 if (parent->get_kind () == RK_FRAME)
4945 reachable_regs.add (base_reg, false);
4946 }
4947
4948 bitmap_clear (out_ids);
4949 for (auto iter_reg : reachable_regs)
4950 bitmap_set_bit (out_ids, iter_reg->get_id ());
4951}
4952
808f4dfe 4953/* Return a new region describing a block of memory allocated within the
b9365b93
DM
4954 current frame.
4955 Use CTXT to complain about tainted sizes. */
757bf1df 4956
808f4dfe 4957const region *
b9365b93
DM
4958region_model::create_region_for_alloca (const svalue *size_in_bytes,
4959 region_model_context *ctxt)
757bf1df 4960{
808f4dfe 4961 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
ea4e3218 4962 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 4963 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 4964 return reg;
757bf1df
DM
4965}
4966
b9365b93
DM
4967/* Record that the size of REG is SIZE_IN_BYTES.
4968 Use CTXT to complain about tainted sizes. */
757bf1df
DM
4969
4970void
9a2c9579 4971region_model::set_dynamic_extents (const region *reg,
b9365b93
DM
4972 const svalue *size_in_bytes,
4973 region_model_context *ctxt)
9a2c9579
DM
4974{
4975 assert_compat_types (size_in_bytes->get_type (), size_type_node);
b9365b93 4976 if (ctxt)
c83e9731
TL
4977 {
4978 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
4979 ctxt);
4980 check_dynamic_size_for_floats (size_in_bytes, ctxt);
4981 }
9a2c9579
DM
4982 m_dynamic_extents.put (reg, size_in_bytes);
4983}
4984
 4985/* Get the recorded dynamic size of REG in bytes, or NULL if no dynamic
 4986 size was recorded. */
4987
4988const svalue *
4989region_model::get_dynamic_extents (const region *reg) const
757bf1df 4990{
9a2c9579
DM
4991 if (const svalue * const *slot = m_dynamic_extents.get (reg))
4992 return *slot;
4993 return NULL;
4994}
4995
4996/* Unset any recorded dynamic size of REG. */
4997
4998void
4999region_model::unset_dynamic_extents (const region *reg)
5000{
5001 m_dynamic_extents.remove (reg);
757bf1df
DM
5002}
5003
c81b60b8
DM
 5004/* Information about the layout of a RECORD_TYPE, capturing it as a vector
 5005 of items, where each item is either a field or padding. */
5006
5007class record_layout
5008{
5009public:
5010 /* An item within a record; either a field, or padding after a field. */
5011 struct item
5012 {
5013 public:
5014 item (const bit_range &br,
5015 tree field,
5016 bool is_padding)
5017 : m_bit_range (br),
5018 m_field (field),
5019 m_is_padding (is_padding)
5020 {
5021 }
5022
5023 bit_offset_t get_start_bit_offset () const
5024 {
5025 return m_bit_range.get_start_bit_offset ();
5026 }
5027 bit_offset_t get_next_bit_offset () const
5028 {
5029 return m_bit_range.get_next_bit_offset ();
5030 }
5031
5032 bool contains_p (bit_offset_t offset) const
5033 {
5034 return m_bit_range.contains_p (offset);
5035 }
5036
5037 void dump_to_pp (pretty_printer *pp) const
5038 {
5039 if (m_is_padding)
5040 pp_printf (pp, "padding after %qD", m_field);
5041 else
5042 pp_printf (pp, "%qD", m_field);
5043 pp_string (pp, ", ");
5044 m_bit_range.dump_to_pp (pp);
5045 }
5046
5047 bit_range m_bit_range;
5048 tree m_field;
5049 bool m_is_padding;
5050 };
5051
5052 record_layout (tree record_type)
c81b60b8
DM
5053 {
5054 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5055
5056 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5057 iter = DECL_CHAIN (iter))
5058 {
5059 if (TREE_CODE (iter) == FIELD_DECL)
5060 {
5061 int iter_field_offset = int_bit_position (iter);
5062 bit_size_t size_in_bits;
5063 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5064 size_in_bits = 0;
5065
5066 maybe_pad_to (iter_field_offset);
5067
5068 /* Add field. */
5069 m_items.safe_push (item (bit_range (iter_field_offset,
5070 size_in_bits),
5071 iter, false));
5072 }
5073 }
5074
5075 /* Add any trailing padding. */
5076 bit_size_t size_in_bits;
5077 if (int_size_in_bits (record_type, &size_in_bits))
5078 maybe_pad_to (size_in_bits);
5079 }
5080
5081 void dump_to_pp (pretty_printer *pp) const
5082 {
5083 unsigned i;
5084 item *it;
5085 FOR_EACH_VEC_ELT (m_items, i, it)
5086 {
5087 it->dump_to_pp (pp);
5088 pp_newline (pp);
5089 }
5090 }
5091
5092 DEBUG_FUNCTION void dump () const
5093 {
5094 pretty_printer pp;
5095 pp_format_decoder (&pp) = default_tree_printer;
5096 pp.buffer->stream = stderr;
5097 dump_to_pp (&pp);
5098 pp_flush (&pp);
5099 }
5100
5101 const record_layout::item *get_item_at (bit_offset_t offset) const
5102 {
5103 unsigned i;
5104 item *it;
5105 FOR_EACH_VEC_ELT (m_items, i, it)
5106 if (it->contains_p (offset))
5107 return it;
5108 return NULL;
5109 }
5110
5111private:
5112 /* Subroutine of ctor. Add a padding item up to NEXT_OFFSET if necessary. */
5113
5114 void maybe_pad_to (bit_offset_t next_offset)
5115 {
5116 if (m_items.length () > 0)
5117 {
5118 const item &last_item = m_items[m_items.length () - 1];
5119 bit_offset_t offset_after_last_item
5120 = last_item.get_next_bit_offset ();
5121 if (next_offset > offset_after_last_item)
5122 {
5123 bit_size_t padding_size
5124 = next_offset - offset_after_last_item;
5125 m_items.safe_push (item (bit_range (offset_after_last_item,
5126 padding_size),
5127 last_item.m_field, true));
5128 }
5129 }
5130 }
5131
c81b60b8
DM
5132 auto_vec<item> m_items;
5133};
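
/* Illustrative sketch (not part of the original source): given

     struct s { char c; int i; };

   on a typical target with 8-bit chars and 32-bit ints aligned to 32
   bits, record_layout would capture three items: field "c" at bits
   0-7, padding after "c" at bits 8-31, and field "i" at bits 32-63,
   so get_item_at (16) would return the padding item. */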
5134
5135/* A subclass of pending_diagnostic for complaining about uninitialized data
5136 being copied across a trust boundary to an untrusted output
5137 (e.g. copy_to_user infoleaks in the Linux kernel). */
5138
5139class exposure_through_uninit_copy
5140 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5141{
5142public:
5143 exposure_through_uninit_copy (const region *src_region,
5144 const region *dest_region,
ffaeb9dc 5145 const svalue *copied_sval)
c81b60b8
DM
5146 : m_src_region (src_region),
5147 m_dest_region (dest_region),
ffaeb9dc 5148 m_copied_sval (copied_sval)
c81b60b8
DM
5149 {
5150 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5151 || m_copied_sval->get_kind () == SK_COMPOUND);
5152 }
5153
5154 const char *get_kind () const final override
5155 {
5156 return "exposure_through_uninit_copy";
5157 }
5158
5159 bool operator== (const exposure_through_uninit_copy &other) const
5160 {
5161 return (m_src_region == other.m_src_region
5162 && m_dest_region == other.m_dest_region
5163 && m_copied_sval == other.m_copied_sval);
5164 }
5165
5166 int get_controlling_option () const final override
5167 {
5168 return OPT_Wanalyzer_exposure_through_uninit_copy;
5169 }
5170
5171 bool emit (rich_location *rich_loc) final override
5172 {
5173 diagnostic_metadata m;
5174 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5175 m.add_cwe (200);
5176 enum memory_space mem_space = get_src_memory_space ();
5177 bool warned;
5178 switch (mem_space)
5179 {
5180 default:
5181 warned = warning_meta
5182 (rich_loc, m, get_controlling_option (),
5183 "potential exposure of sensitive information"
5184 " by copying uninitialized data across trust boundary");
5185 break;
5186 case MEMSPACE_STACK:
5187 warned = warning_meta
5188 (rich_loc, m, get_controlling_option (),
5189 "potential exposure of sensitive information"
5190 " by copying uninitialized data from stack across trust boundary");
5191 break;
5192 case MEMSPACE_HEAP:
5193 warned = warning_meta
5194 (rich_loc, m, get_controlling_option (),
5195 "potential exposure of sensitive information"
5196 " by copying uninitialized data from heap across trust boundary");
5197 break;
5198 }
5199 if (warned)
5200 {
5201 location_t loc = rich_loc->get_loc ();
5202 inform_number_of_uninit_bits (loc);
5203 complain_about_uninit_ranges (loc);
5204
5205 if (mem_space == MEMSPACE_STACK)
5206 maybe_emit_fixit_hint ();
5207 }
5208 return warned;
5209 }
5210
5211 label_text describe_final_event (const evdesc::final_event &) final override
5212 {
5213 enum memory_space mem_space = get_src_memory_space ();
5214 switch (mem_space)
5215 {
5216 default:
5217 return label_text::borrow ("uninitialized data copied here");
5218
5219 case MEMSPACE_STACK:
5220 return label_text::borrow ("uninitialized data copied from stack here");
5221
5222 case MEMSPACE_HEAP:
5223 return label_text::borrow ("uninitialized data copied from heap here");
5224 }
5225 }
5226
5227 void mark_interesting_stuff (interesting_t *interest) final override
5228 {
5229 if (m_src_region)
5230 interest->add_region_creation (m_src_region);
5231 }
5232
5233private:
5234 enum memory_space get_src_memory_space () const
5235 {
5236 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5237 }
5238
5239 bit_size_t calc_num_uninit_bits () const
5240 {
5241 switch (m_copied_sval->get_kind ())
5242 {
5243 default:
5244 gcc_unreachable ();
5245 break;
5246 case SK_POISONED:
5247 {
5248 const poisoned_svalue *poisoned_sval
5249 = as_a <const poisoned_svalue *> (m_copied_sval);
5250 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5251
5252 /* Give up if we don't have type information. */
5253 if (m_copied_sval->get_type () == NULL_TREE)
5254 return 0;
5255
5256 bit_size_t size_in_bits;
5257 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5258 return size_in_bits;
5259
5260 /* Give up if we can't get the size of the type. */
5261 return 0;
5262 }
5263 break;
5264 case SK_COMPOUND:
5265 {
5266 const compound_svalue *compound_sval
5267 = as_a <const compound_svalue *> (m_copied_sval);
5268 bit_size_t result = 0;
5269 /* Find keys for uninit svals. */
5270 for (auto iter : *compound_sval)
5271 {
5272 const svalue *sval = iter.second;
5273 if (const poisoned_svalue *psval
5274 = sval->dyn_cast_poisoned_svalue ())
5275 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5276 {
5277 const binding_key *key = iter.first;
5278 const concrete_binding *ckey
5279 = key->dyn_cast_concrete_binding ();
5280 gcc_assert (ckey);
5281 result += ckey->get_size_in_bits ();
5282 }
5283 }
5284 return result;
5285 }
5286 }
5287 }
5288
5289 void inform_number_of_uninit_bits (location_t loc) const
5290 {
5291 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5292 if (num_uninit_bits <= 0)
5293 return;
5294 if (num_uninit_bits % BITS_PER_UNIT == 0)
5295 {
5296 /* Express in bytes. */
5297 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5298 if (num_uninit_bytes == 1)
5299 inform (loc, "1 byte is uninitialized");
5300 else
5301 inform (loc,
5302 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5303 }
5304 else
5305 {
5306 /* Express in bits. */
5307 if (num_uninit_bits == 1)
5308 inform (loc, "1 bit is uninitialized");
5309 else
5310 inform (loc,
5311 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5312 }
5313 }
5314
5315 void complain_about_uninit_ranges (location_t loc) const
5316 {
5317 if (const compound_svalue *compound_sval
5318 = m_copied_sval->dyn_cast_compound_svalue ())
5319 {
5320 /* Find keys for uninit svals. */
5321 auto_vec<const concrete_binding *> uninit_keys;
5322 for (auto iter : *compound_sval)
5323 {
5324 const svalue *sval = iter.second;
5325 if (const poisoned_svalue *psval
5326 = sval->dyn_cast_poisoned_svalue ())
5327 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5328 {
5329 const binding_key *key = iter.first;
5330 const concrete_binding *ckey
5331 = key->dyn_cast_concrete_binding ();
5332 gcc_assert (ckey);
5333 uninit_keys.safe_push (ckey);
5334 }
5335 }
5336 /* Complain about them in sorted order. */
5337 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
5338
5339 std::unique_ptr<record_layout> layout;
5340
5341 tree type = m_copied_sval->get_type ();
5342 if (type && TREE_CODE (type) == RECORD_TYPE)
5343 {
5344 // (std::make_unique is C++14)
5345 layout = std::unique_ptr<record_layout> (new record_layout (type));
5346
5347 if (0)
5348 layout->dump ();
5349 }
5350
5351 unsigned i;
5352 const concrete_binding *ckey;
5353 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
5354 {
5355 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5356 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5357 complain_about_uninit_range (loc, start_bit, next_bit,
5358 layout.get ());
5359 }
5360 }
5361 }
5362
5363 void complain_about_uninit_range (location_t loc,
5364 bit_offset_t start_bit,
5365 bit_offset_t next_bit,
5366 const record_layout *layout) const
5367 {
5368 if (layout)
5369 {
5370 while (start_bit < next_bit)
5371 {
5372 if (const record_layout::item *item
5373 = layout->get_item_at (start_bit))
5374 {
5375 gcc_assert (start_bit >= item->get_start_bit_offset ());
5376 gcc_assert (start_bit < item->get_next_bit_offset ());
5377 if (item->get_start_bit_offset () == start_bit
5378 && item->get_next_bit_offset () <= next_bit)
5379 complain_about_fully_uninit_item (*item);
5380 else
5381 complain_about_partially_uninit_item (*item);
5382 start_bit = item->get_next_bit_offset ();
5383 continue;
5384 }
5385 else
5386 break;
5387 }
5388 }
5389
5390 if (start_bit >= next_bit)
5391 return;
5392
5393 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5394 {
5395 /* Express in bytes. */
5396 byte_offset_t start_byte = start_bit / 8;
5397 byte_offset_t last_byte = (next_bit / 8) - 1;
5398 if (last_byte == start_byte)
5399 inform (loc,
5400 "byte %wu is uninitialized",
5401 start_byte.to_uhwi ());
5402 else
5403 inform (loc,
5404 "bytes %wu - %wu are uninitialized",
5405 start_byte.to_uhwi (),
5406 last_byte.to_uhwi ());
5407 }
5408 else
5409 {
5410 /* Express in bits. */
5411 bit_offset_t last_bit = next_bit - 1;
5412 if (last_bit == start_bit)
5413 inform (loc,
5414 "bit %wu is uninitialized",
5415 start_bit.to_uhwi ());
5416 else
5417 inform (loc,
5418 "bits %wu - %wu are uninitialized",
5419 start_bit.to_uhwi (),
5420 last_bit.to_uhwi ());
5421 }
5422 }
5423
5424 static void
5425 complain_about_fully_uninit_item (const record_layout::item &item)
5426 {
5427 tree field = item.m_field;
5428 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5429 if (item.m_is_padding)
5430 {
5431 if (num_bits % 8 == 0)
5432 {
5433 /* Express in bytes. */
5434 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5435 if (num_bytes == 1)
5436 inform (DECL_SOURCE_LOCATION (field),
5437 "padding after field %qD is uninitialized (1 byte)",
5438 field);
5439 else
5440 inform (DECL_SOURCE_LOCATION (field),
5441 "padding after field %qD is uninitialized (%wu bytes)",
5442 field, num_bytes.to_uhwi ());
5443 }
5444 else
5445 {
5446 /* Express in bits. */
5447 if (num_bits == 1)
5448 inform (DECL_SOURCE_LOCATION (field),
5449 "padding after field %qD is uninitialized (1 bit)",
5450 field);
5451 else
5452 inform (DECL_SOURCE_LOCATION (field),
5453 "padding after field %qD is uninitialized (%wu bits)",
5454 field, num_bits.to_uhwi ());
5455 }
5456 }
5457 else
5458 {
5459 if (num_bits % 8 == 0)
5460 {
5461 /* Express in bytes. */
5462 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5463 if (num_bytes == 1)
5464 inform (DECL_SOURCE_LOCATION (field),
5465 "field %qD is uninitialized (1 byte)", field);
5466 else
5467 inform (DECL_SOURCE_LOCATION (field),
5468 "field %qD is uninitialized (%wu bytes)",
5469 field, num_bytes.to_uhwi ());
5470 }
5471 else
5472 {
5473 /* Express in bits. */
5474 if (num_bits == 1)
5475 inform (DECL_SOURCE_LOCATION (field),
5476 "field %qD is uninitialized (1 bit)", field);
5477 else
5478 inform (DECL_SOURCE_LOCATION (field),
5479 "field %qD is uninitialized (%wu bits)",
5480 field, num_bits.to_uhwi ());
5481 }
5482 }
5483 }
5484
5485 static void
5486 complain_about_partially_uninit_item (const record_layout::item &item)
5487 {
5488 tree field = item.m_field;
5489 if (item.m_is_padding)
5490 inform (DECL_SOURCE_LOCATION (field),
5491 "padding after field %qD is partially uninitialized",
5492 field);
5493 else
5494 inform (DECL_SOURCE_LOCATION (field),
5495 "field %qD is partially uninitialized",
5496 field);
5497 /* TODO: ideally we'd describe what parts are uninitialized. */
5498 }
5499
5500 void maybe_emit_fixit_hint () const
5501 {
5502 if (tree decl = m_src_region->maybe_get_decl ())
5503 {
5504 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
5505 hint_richloc.add_fixit_insert_after (" = {0}");
5506 inform (&hint_richloc,
5507 "suggest forcing zero-initialization by"
5508 " providing a %<{0}%> initializer");
5509 }
5510 }
5511
5512private:
5513 const region *m_src_region;
5514 const region *m_dest_region;
5515 const svalue *m_copied_sval;
c81b60b8
DM
5516};
5517
5518/* Return true if any part of SVAL is uninitialized. */
5519
5520static bool
5521contains_uninit_p (const svalue *sval)
5522{
5523 struct uninit_finder : public visitor
5524 {
5525 public:
5526 uninit_finder () : m_found_uninit (false) {}
5527 void visit_poisoned_svalue (const poisoned_svalue *sval)
5528 {
5529 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5530 m_found_uninit = true;
5531 }
5532 bool m_found_uninit;
5533 };
5534
5535 uninit_finder v;
5536 sval->accept (&v);
5537
5538 return v.m_found_uninit;
5539}
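
/* Illustrative sketch (not part of the original source): the same
   local-visitor idiom can answer other queries about an svalue tree,
   e.g. a hypothetical check for conjured values:

     struct conjured_finder : public visitor
     {
       void visit_conjured_svalue (const conjured_svalue *) final override
       {
         m_found_conjured = true;
       }
       bool m_found_conjured = false;
     };
*/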
5540
5541/* Function for use by plugins when simulating writing data through a
5542 pointer to an "untrusted" region DST_REG (and thus crossing a security
5543 boundary), such as copying data to user space in an OS kernel.
5544
5545 Check that COPIED_SVAL is fully initialized. If not, complain about
5546 an infoleak to CTXT.
5547
5548 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5549 as to where COPIED_SVAL came from. */
5550
5551void
5552region_model::maybe_complain_about_infoleak (const region *dst_reg,
5553 const svalue *copied_sval,
5554 const region *src_reg,
5555 region_model_context *ctxt)
5556{
5557 /* Check for exposure. */
5558 if (contains_uninit_p (copied_sval))
6341f14e
DM
5559 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5560 dst_reg,
5561 copied_sval));
c81b60b8
DM
5562}
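
/* Illustrative sketch (not part of the original source): a plugin
   modeling a copy_to_user-like function might, hypothetically, invoke
   the above from its known_function handler along these lines:

     region_model *model = cd.get_model ();
     const svalue *dest_sval = cd.get_arg_svalue (0);
     const svalue *src_sval = cd.get_arg_svalue (1);
     const region *dest_reg
       = model->deref_rvalue (dest_sval, cd.get_arg_tree (0),
                              cd.get_ctxt ());
     const region *src_reg
       = model->deref_rvalue (src_sval, cd.get_arg_tree (1),
                              cd.get_ctxt ());
     const svalue *copied_sval
       = model->get_store_value (src_reg, cd.get_ctxt ());
     model->maybe_complain_about_infoleak (dest_reg, copied_sval,
                                           src_reg, cd.get_ctxt ());
*/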
5563
3d2d04cd
DM
5564/* Set errno to a positive symbolic int, as if some error has occurred. */
5565
5566void
5567region_model::set_errno (const call_details &cd)
5568{
5569 const region *errno_reg = m_mgr->get_errno_region ();
5570 conjured_purge p (this, cd.get_ctxt ());
5571 const svalue *new_errno_sval
5572 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
5573 cd.get_call_stmt (),
5574 errno_reg, p);
5575 const svalue *zero
5576 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
5577 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
5578 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
5579}
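
/* Illustrative sketch (not part of the original source): a
   known_function handler simulating a failing call might,
   hypothetically, use the above via

     cd.get_model ()->set_errno (cd);

   after which the conjured errno value is constrained to be greater
   than zero, so a subsequent "errno == 0" test folds to false. */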
5580
eafa9d96
DM
5581/* class noop_region_model_context : public region_model_context. */
5582
c65d3c7f 5583void
6341f14e 5584noop_region_model_context::add_note (std::unique_ptr<pending_note>)
c65d3c7f 5585{
c65d3c7f
DM
5586}
5587
eafa9d96 5588void
accece8c 5589noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
eafa9d96 5590{
eafa9d96
DM
5591}
5592
5593void
5594noop_region_model_context::terminate_path ()
5595{
5596}
5597
808f4dfe 5598/* struct model_merger. */
757bf1df 5599
808f4dfe 5600/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
5601
5602void
808f4dfe 5603model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 5604{
808f4dfe
DM
5605 pp_string (pp, "model A:");
5606 pp_newline (pp);
5607 m_model_a->dump_to_pp (pp, simple, true);
5608 pp_newline (pp);
757bf1df 5609
808f4dfe 5610 pp_string (pp, "model B:");
757bf1df 5611 pp_newline (pp);
808f4dfe 5612 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
5613 pp_newline (pp);
5614
808f4dfe 5615 pp_string (pp, "merged model:");
757bf1df 5616 pp_newline (pp);
808f4dfe 5617 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
5618 pp_newline (pp);
5619}
5620
808f4dfe 5621/* Dump a multiline representation of this merger to FILE. */
757bf1df
DM
5622
5623void
808f4dfe 5624model_merger::dump (FILE *fp, bool simple) const
757bf1df
DM
5625{
5626 pretty_printer pp;
5627 pp_format_decoder (&pp) = default_tree_printer;
5628 pp_show_color (&pp) = pp_show_color (global_dc->printer);
5629 pp.buffer->stream = fp;
808f4dfe 5630 dump_to_pp (&pp, simple);
757bf1df
DM
5631 pp_flush (&pp);
5632}
5633
808f4dfe 5634/* Dump a multiline representation of this merger to stderr. */
757bf1df
DM
5635
5636DEBUG_FUNCTION void
808f4dfe 5637model_merger::dump (bool simple) const
757bf1df 5638{
808f4dfe 5639 dump (stderr, simple);
757bf1df
DM
5640}
5641
f573d351
DM
5642/* Return true if it's OK to merge SVAL with other svalues. */
5643
5644bool
5645model_merger::mergeable_svalue_p (const svalue *sval) const
5646{
5647 if (m_ext_state)
5648 {
5649 /* Reject merging svalues that have non-purgeable sm-state,
5650 to avoid falsely reporting memory leaks by merging them
5651 with something else. For example, given a local var "p",
5652 reject the merger of a:
5653 store_a mapping "p" to a malloc-ed ptr
5654 with:
5655 store_b mapping "p" to a NULL ptr. */
5656 if (m_state_a)
5657 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5658 return false;
5659 if (m_state_b)
5660 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5661 return false;
5662 }
5663 return true;
5664}
5665
75038aa6
DM
5666} // namespace ana
5667
808f4dfe 5668/* Dump RMODEL fully to stderr (i.e. without summarization). */
757bf1df 5669
808f4dfe
DM
5670DEBUG_FUNCTION void
5671debug (const region_model &rmodel)
757bf1df 5672{
808f4dfe 5673 rmodel.dump (false);
757bf1df
DM
5674}
5675
8ca7fa84 5676/* class rejected_op_constraint : public rejected_constraint. */
84fb3546
DM
5677
5678void
8ca7fa84 5679rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
84fb3546
DM
5680{
5681 region_model m (m_model);
5682 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5683 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5684 lhs_sval->dump_to_pp (pp, true);
5685 pp_printf (pp, " %s ", op_symbol_code (m_op));
5686 rhs_sval->dump_to_pp (pp, true);
5687}
5688
8ca7fa84
DM
5689/* class rejected_ranges_constraint : public rejected_constraint. */
5690
5691void
5692rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5693{
5694 region_model m (m_model);
5695 const svalue *sval = m.get_rvalue (m_expr, NULL);
5696 sval->dump_to_pp (pp, true);
5697 pp_string (pp, " in ");
5698 m_ranges->dump_to_pp (pp, true);
5699}
5700
808f4dfe 5701/* class engine. */
757bf1df 5702
11a2ff8d
DM
5703/* engine's ctor. */
5704
4cebae09
DM
5705engine::engine (const supergraph *sg, logger *logger)
5706: m_sg (sg), m_mgr (logger)
11a2ff8d
DM
5707{
5708}
5709
808f4dfe 5710/* Dump the managed objects by class to LOGGER, and the per-class totals. */
757bf1df 5711
808f4dfe
DM
5712void
5713engine::log_stats (logger *logger) const
757bf1df 5714{
808f4dfe 5715 m_mgr.log_stats (logger, true);
757bf1df
DM
5716}
5717
75038aa6
DM
5718namespace ana {
5719
757bf1df
DM
5720#if CHECKING_P
5721
5722namespace selftest {
5723
8c08c983
DM
5724/* Build a REAL_CST of the given type from STR. */
5725
5726static tree
5727build_real_cst_from_string (tree type, const char *str)
5728{
5729 REAL_VALUE_TYPE real;
5730 real_from_string (&real, str);
5731 return build_real (type, real);
5732}
5733
5734/* Append various "interesting" constants to OUT (e.g. NaN). */
5735
5736static void
5737append_interesting_constants (auto_vec<tree> *out)
5738{
5739 out->safe_push (build_int_cst (integer_type_node, 0));
5740 out->safe_push (build_int_cst (integer_type_node, 42));
5741 out->safe_push (build_int_cst (unsigned_type_node, 0));
5742 out->safe_push (build_int_cst (unsigned_type_node, 42));
5743 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5744 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5745 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5746 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5747 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5748 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5749 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5750 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5751}
5752
5753/* Verify that tree_cmp is a well-behaved comparator for qsort, even
5754 if the underlying constants aren't comparable. */
5755
5756static void
5757test_tree_cmp_on_constants ()
5758{
5759 auto_vec<tree> csts;
5760 append_interesting_constants (&csts);
5761
5762 /* Try sorting every triple. */
5763 const unsigned num = csts.length ();
5764 for (unsigned i = 0; i < num; i++)
5765 for (unsigned j = 0; j < num; j++)
5766 for (unsigned k = 0; k < num; k++)
5767 {
5768 auto_vec<tree> v (3);
5769 v.quick_push (csts[i]);
5770 v.quick_push (csts[j]);
5771 v.quick_push (csts[k]);
5772 v.qsort (tree_cmp);
5773 }
5774}
5775
757bf1df
DM
5776/* Implementation detail of the ASSERT_CONDITION_* macros. */
5777
808f4dfe
DM
5778void
5779assert_condition (const location &loc,
5780 region_model &model,
5781 const svalue *lhs, tree_code op, const svalue *rhs,
5782 tristate expected)
5783{
5784 tristate actual = model.eval_condition (lhs, op, rhs);
5785 ASSERT_EQ_AT (loc, actual, expected);
5786}
5787
5788/* Implementation detail of the ASSERT_CONDITION_* macros. */
5789
757bf1df
DM
5790void
5791assert_condition (const location &loc,
5792 region_model &model,
5793 tree lhs, tree_code op, tree rhs,
5794 tristate expected)
5795{
5796 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
5797 ASSERT_EQ_AT (loc, actual, expected);
5798}
5799
90f7c300
DM
5800/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
5801
5802static void
5803assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5804{
5805 auto_fix_quotes sentinel;
5806 pretty_printer pp;
5807 pp_format_decoder (&pp) = default_tree_printer;
5808 dump_tree (&pp, t);
5809 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5810}
5811
5812/* Assert that dump_tree (T) is EXPECTED. */
5813
5814#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
5815 SELFTEST_BEGIN_STMT \
5816 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
5817 SELFTEST_END_STMT
5818
757bf1df
DM
5819/* Implementation detail of ASSERT_DUMP_EQ. */
5820
5821static void
5822assert_dump_eq (const location &loc,
5823 const region_model &model,
5824 bool summarize,
5825 const char *expected)
5826{
5827 auto_fix_quotes sentinel;
5828 pretty_printer pp;
5829 pp_format_decoder (&pp) = default_tree_printer;
808f4dfe
DM
5830
5831 model.dump_to_pp (&pp, summarize, true);
757bf1df
DM
5832 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5833}
5834
5835/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
5836
5837#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
5838 SELFTEST_BEGIN_STMT \
5839 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
5840 SELFTEST_END_STMT
5841
5842/* Smoketest for region_model::dump_to_pp. */
5843
5844static void
5845test_dump ()
5846{
808f4dfe
DM
5847 region_model_manager mgr;
5848 region_model model (&mgr);
757bf1df
DM
5849
5850 ASSERT_DUMP_EQ (model, false,
808f4dfe
DM
5851 "stack depth: 0\n"
5852 "m_called_unknown_fn: FALSE\n"
5853 "constraint_manager:\n"
5854 " equiv classes:\n"
5855 " constraints:\n");
5856 ASSERT_DUMP_EQ (model, true,
5857 "stack depth: 0\n"
5858 "m_called_unknown_fn: FALSE\n"
5859 "constraint_manager:\n"
757bf1df
DM
5860 " equiv classes:\n"
5861 " constraints:\n");
757bf1df
DM
5862}
5863
884d9141
DM
5864/* Helper function for selftests. Create a struct or union type named NAME,
5865 with the fields given by the FIELD_DECLS in FIELDS.
5866 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
5867 create a UNION_TYPE. */
5868
5869static tree
5870make_test_compound_type (const char *name, bool is_struct,
5871 const auto_vec<tree> *fields)
5872{
5873 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
5874 TYPE_NAME (t) = get_identifier (name);
5875 TYPE_SIZE (t) = 0;
5876
5877 tree fieldlist = NULL;
5878 int i;
5879 tree field;
5880 FOR_EACH_VEC_ELT (*fields, i, field)
5881 {
5882 gcc_assert (TREE_CODE (field) == FIELD_DECL);
5883 DECL_CONTEXT (field) = t;
5884 fieldlist = chainon (field, fieldlist);
5885 }
5886 fieldlist = nreverse (fieldlist);
5887 TYPE_FIELDS (t) = fieldlist;
5888
5889 layout_type (t);
5890 return t;
5891}
5892
a96f1c38
DM
5893/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
5894
5895struct coord_test
5896{
5897 coord_test ()
5898 {
5899 auto_vec<tree> fields;
5900 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
5901 get_identifier ("x"), integer_type_node);
5902 fields.safe_push (m_x_field);
5903 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
5904 get_identifier ("y"), integer_type_node);
5905 fields.safe_push (m_y_field);
5906 m_coord_type = make_test_compound_type ("coord", true, &fields);
5907 }
5908
5909 tree m_x_field;
5910 tree m_y_field;
5911 tree m_coord_type;
5912};
5913
808f4dfe 5914/* Verify usage of a struct. */
884d9141
DM
5915
5916static void
808f4dfe 5917test_struct ()
884d9141 5918{
a96f1c38
DM
5919 coord_test ct;
5920
5921 tree c = build_global_decl ("c", ct.m_coord_type);
5922 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
5923 c, ct.m_x_field, NULL_TREE);
5924 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
5925 c, ct.m_y_field, NULL_TREE);
884d9141
DM
5926
5927 tree int_17 = build_int_cst (integer_type_node, 17);
5928 tree int_m3 = build_int_cst (integer_type_node, -3);
5929
808f4dfe
DM
5930 region_model_manager mgr;
5931 region_model model (&mgr);
884d9141
DM
5932 model.set_value (c_x, int_17, NULL);
5933 model.set_value (c_y, int_m3, NULL);
5934
808f4dfe
DM
5935 /* Verify get_offset for "c.x". */
5936 {
5937 const region *c_x_reg = model.get_lvalue (c_x, NULL);
7a6564c9 5938 region_offset offset = c_x_reg->get_offset (&mgr);
808f4dfe
DM
5939 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
5940 ASSERT_EQ (offset.get_bit_offset (), 0);
5941 }
5942
5943 /* Verify get_offset for "c.y". */
5944 {
5945 const region *c_y_reg = model.get_lvalue (c_y, NULL);
7a6564c9 5946 region_offset offset = c_y_reg->get_offset (&mgr);
808f4dfe
DM
5947 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
5948 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
5949 }
884d9141
DM
5950}
5951
808f4dfe 5952/* Verify usage of an array element. */
884d9141
DM
5953
5954static void
808f4dfe 5955test_array_1 ()
884d9141
DM
5956{
5957 tree tlen = size_int (10);
5958 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
5959
5960 tree a = build_global_decl ("a", arr_type);
5961
808f4dfe
DM
5962 region_model_manager mgr;
5963 region_model model (&mgr);
884d9141
DM
5964 tree int_0 = build_int_cst (integer_type_node, 0);
5965 tree a_0 = build4 (ARRAY_REF, char_type_node,
5966 a, int_0, NULL_TREE, NULL_TREE);
5967 tree char_A = build_int_cst (char_type_node, 'A');
5968 model.set_value (a_0, char_A, NULL);
884d9141
DM
5969}
5970
90f7c300
DM
5971/* Verify that region_model::get_representative_tree works as expected. */
5972
5973static void
5974test_get_representative_tree ()
5975{
808f4dfe
DM
5976 region_model_manager mgr;
5977
90f7c300
DM
5978 /* STRING_CST. */
5979 {
5980 tree string_cst = build_string (4, "foo");
808f4dfe
DM
5981 region_model m (&mgr);
5982 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
5983 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
5984 ASSERT_EQ (rep, string_cst);
5985 }
5986
5987 /* String literal. */
5988 {
5989 tree string_cst_ptr = build_string_literal (4, "foo");
808f4dfe
DM
5990 region_model m (&mgr);
5991 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
5992 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
5993 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
5994 }
808f4dfe
DM
5995
5996 /* Value of an element within an array. */
5997 {
5998 tree tlen = size_int (10);
5999 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6000 tree a = build_global_decl ("a", arr_type);
6001 placeholder_svalue test_sval (char_type_node, "test value");
6002
6003 /* Value of a[3]. */
6004 {
6005 test_region_model_context ctxt;
6006 region_model model (&mgr);
6007 tree int_3 = build_int_cst (integer_type_node, 3);
6008 tree a_3 = build4 (ARRAY_REF, char_type_node,
6009 a, int_3, NULL_TREE, NULL_TREE);
6010 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6011 model.set_value (a_3_reg, &test_sval, &ctxt);
6012 tree rep = model.get_representative_tree (&test_sval);
6013 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6014 }
6015
6016 /* Value of a[0]. */
6017 {
6018 test_region_model_context ctxt;
6019 region_model model (&mgr);
6020 tree idx = build_int_cst (integer_type_node, 0);
6021 tree a_0 = build4 (ARRAY_REF, char_type_node,
6022 a, idx, NULL_TREE, NULL_TREE);
6023 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6024 model.set_value (a_0_reg, &test_sval, &ctxt);
6025 tree rep = model.get_representative_tree (&test_sval);
6026 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6027 }
6028 }
6029
6030 /* Value of a field within a struct. */
6031 {
6032 coord_test ct;
6033
6034 tree c = build_global_decl ("c", ct.m_coord_type);
6035 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6036 c, ct.m_x_field, NULL_TREE);
6037 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6038 c, ct.m_y_field, NULL_TREE);
6039
6040 test_region_model_context ctxt;
6041
6042 /* Value of initial field. */
6043 {
6044 region_model m (&mgr);
6045 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6046 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6047 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6048 tree rep = m.get_representative_tree (&test_sval_x);
6049 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6050 }
6051
6052 /* Value of non-initial field. */
6053 {
6054 region_model m (&mgr);
6055 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6056 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6057 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6058 tree rep = m.get_representative_tree (&test_sval_y);
6059 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6060 }
6061 }
90f7c300
DM
6062}
6063
757bf1df 6064/* Verify that calling region_model::get_rvalue repeatedly on the same
808f4dfe 6065 tree constant retrieves the same svalue *. */
757bf1df
DM
6066
6067static void
6068test_unique_constants ()
6069{
6070 tree int_0 = build_int_cst (integer_type_node, 0);
6071 tree int_42 = build_int_cst (integer_type_node, 42);
6072
6073 test_region_model_context ctxt;
808f4dfe
DM
6074 region_model_manager mgr;
6075 region_model model (&mgr);
757bf1df
DM
6076 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6077 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6078 model.get_rvalue (int_42, &ctxt));
6079 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6080 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
757bf1df 6081
808f4dfe
DM
6082 /* A "(const int)42" will be a different tree from "(int)42"... */
6083 tree const_int_type_node
6084 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6085 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6086 ASSERT_NE (int_42, const_int_42);
6087 /* It should have a different const_svalue. */
6088 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6089 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6090 ASSERT_NE (int_42_sval, const_int_42_sval);
6091 /* But they should compare as equal. */
6092 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6093 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
757bf1df
DM
6094}
6095
808f4dfe
DM
6096/* Verify that each type gets its own singleton unknown_svalue within a
6097 region_model_manager, and that NULL_TREE gets its own singleton. */
757bf1df
DM
6098
6099static void
808f4dfe 6100test_unique_unknowns ()
757bf1df 6101{
808f4dfe
DM
6102 region_model_manager mgr;
6103 const svalue *unknown_int
6104 = mgr.get_or_create_unknown_svalue (integer_type_node);
6105 /* Repeated calls with the same type should get the same "unknown"
6106 svalue. */
6107 const svalue *unknown_int_2
6108 = mgr.get_or_create_unknown_svalue (integer_type_node);
6109 ASSERT_EQ (unknown_int, unknown_int_2);
757bf1df 6110
808f4dfe
DM
6111 /* Different types (or the NULL type) should have different
6112 unknown_svalues. */
6113 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6114 ASSERT_NE (unknown_NULL_type, unknown_int);
757bf1df 6115
808f4dfe
DM
6116 /* Repeated calls with NULL for the type should get the same "unknown"
6117 svalue. */
6118 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6119 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
757bf1df
DM
6120}
6121
808f4dfe 6122/* Verify that initial_svalues are handled as expected. */
757bf1df 6123
808f4dfe
DM
6124static void
6125test_initial_svalue_folding ()
757bf1df 6126{
808f4dfe
DM
6127 region_model_manager mgr;
6128 tree x = build_global_decl ("x", integer_type_node);
6129 tree y = build_global_decl ("y", integer_type_node);
757bf1df 6130
808f4dfe
DM
6131 test_region_model_context ctxt;
6132 region_model model (&mgr);
6133 const svalue *x_init = model.get_rvalue (x, &ctxt);
6134 const svalue *y_init = model.get_rvalue (y, &ctxt);
6135 ASSERT_NE (x_init, y_init);
6136 const region *x_reg = model.get_lvalue (x, &ctxt);
6137 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
757bf1df 6138
808f4dfe 6139}
757bf1df 6140
808f4dfe 6141/* Verify that unary ops are folded as expected. */
757bf1df
DM
6142
6143static void
808f4dfe 6144test_unaryop_svalue_folding ()
757bf1df 6145{
808f4dfe 6146 region_model_manager mgr;
757bf1df
DM
6147 tree x = build_global_decl ("x", integer_type_node);
6148 tree y = build_global_decl ("y", integer_type_node);
6149
808f4dfe
DM
6150 test_region_model_context ctxt;
6151 region_model model (&mgr);
6152 const svalue *x_init = model.get_rvalue (x, &ctxt);
6153 const svalue *y_init = model.get_rvalue (y, &ctxt);
6154 const region *x_reg = model.get_lvalue (x, &ctxt);
6155 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6156
6157 /* "(int)x" -> "x". */
6158 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6159
6160 /* "(void *)x" -> something other than "x". */
6161 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
6162
6163 /* "!(x == y)" -> "x != y". */
6164 ASSERT_EQ (mgr.get_or_create_unaryop
6165 (boolean_type_node, TRUTH_NOT_EXPR,
6166 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
6167 x_init, y_init)),
6168 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
6169 x_init, y_init));
6170 /* "!(x > y)" -> "x <= y". */
6171 ASSERT_EQ (mgr.get_or_create_unaryop
6172 (boolean_type_node, TRUTH_NOT_EXPR,
6173 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
6174 x_init, y_init)),
6175 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
6176 x_init, y_init));
6177}
6178
6179/* Verify that binops on constant svalues are folded. */
757bf1df 6180
808f4dfe
DM
6181static void
6182test_binop_svalue_folding ()
6183{
6184#define NUM_CSTS 10
6185 tree cst_int[NUM_CSTS];
6186 region_model_manager mgr;
6187 const svalue *cst_sval[NUM_CSTS];
6188 for (int i = 0; i < NUM_CSTS; i++)
6189 {
6190 cst_int[i] = build_int_cst (integer_type_node, i);
6191 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
6192 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
6193 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
6194 }
757bf1df 6195
808f4dfe
DM
6196 for (int i = 0; i < NUM_CSTS; i++)
6197 for (int j = 0; j < NUM_CSTS; j++)
6198 {
6199 if (i != j)
6200 ASSERT_NE (cst_sval[i], cst_sval[j]);
6201 if (i + j < NUM_CSTS)
6202 {
6203 const svalue *sum
6204 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6205 cst_sval[i], cst_sval[j]);
6206 ASSERT_EQ (sum, cst_sval[i + j]);
6207 }
6208 if (i - j >= 0)
6209 {
6210 const svalue *difference
6211 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
6212 cst_sval[i], cst_sval[j]);
6213 ASSERT_EQ (difference, cst_sval[i - j]);
6214 }
6215 if (i * j < NUM_CSTS)
6216 {
6217 const svalue *product
6218 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6219 cst_sval[i], cst_sval[j]);
6220 ASSERT_EQ (product, cst_sval[i * j]);
6221 }
6222 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
6223 cst_sval[i], cst_sval[j]);
6224 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
6225 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
6226 cst_sval[i], cst_sval[j]);
6227 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
6228 // etc
6229 }
757bf1df 6230
808f4dfe 6231 tree x = build_global_decl ("x", integer_type_node);
757bf1df 6232
808f4dfe
DM
6233 test_region_model_context ctxt;
6234 region_model model (&mgr);
6235 const svalue *x_init = model.get_rvalue (x, &ctxt);
6236
6237 /* PLUS_EXPR folding. */
6238 const svalue *x_init_plus_zero
6239 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6240 x_init, cst_sval[0]);
6241 ASSERT_EQ (x_init_plus_zero, x_init);
6242 const svalue *zero_plus_x_init
6243 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6244 cst_sval[0], x_init);
6245 ASSERT_EQ (zero_plus_x_init, x_init);
6246
6247 /* MULT_EXPR folding. */
6248 const svalue *x_init_times_zero
6249 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6250 x_init, cst_sval[0]);
6251 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
6252 const svalue *zero_times_x_init
6253 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6254 cst_sval[0], x_init);
6255 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
6256
6257 const svalue *x_init_times_one
6258 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6259 x_init, cst_sval[1]);
6260 ASSERT_EQ (x_init_times_one, x_init);
6261 const svalue *one_times_x_init
6262 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6263 cst_sval[1], x_init);
6264 ASSERT_EQ (one_times_x_init, x_init);
6265
6266 // etc
6267 // TODO: do we want to use the match-and-simplify DSL for this?
6268
6269 /* Verify that binops put any constants on the RHS. */
6270 const svalue *four_times_x_init
6271 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6272 cst_sval[4], x_init);
6273 const svalue *x_init_times_four
6274 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6275 x_init, cst_sval[4]);
6276 ASSERT_EQ (four_times_x_init, x_init_times_four);
6277 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
6278 ASSERT_EQ (binop->get_op (), MULT_EXPR);
6279 ASSERT_EQ (binop->get_arg0 (), x_init);
6280 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
6281
6282 /* Verify that ((x + 1) + 1) == (x + 2). */
6283 const svalue *x_init_plus_one
6284 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6285 x_init, cst_sval[1]);
6286 const svalue *x_init_plus_two
6287 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6288 x_init, cst_sval[2]);
6289 const svalue *x_init_plus_one_plus_one
6290 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6291 x_init_plus_one, cst_sval[1]);
6292 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
4f34f8cc
DM
6293
6294 /* Verify various binops on booleans. */
6295 {
6296 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
6297 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
6298 const svalue *sval_unknown
6299 = mgr.get_or_create_unknown_svalue (boolean_type_node);
6300 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
6301 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
6302 {
6303 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6304 sval_true, sval_unknown),
6305 sval_true);
6306 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6307 sval_false, sval_unknown),
6308 sval_unknown);
6309 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6310 sval_false, &sval_placeholder),
6311 &sval_placeholder);
6312 }
6313 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
6314 {
6315 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6316 sval_false, sval_unknown),
6317 sval_false);
6318 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6319 sval_true, sval_unknown),
6320 sval_unknown);
6321 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6322 sval_true, &sval_placeholder),
6323 &sval_placeholder);
6324 }
6325 }
808f4dfe
DM
6326}
6327
6328/* Verify that sub_svalues are folded as expected. */
757bf1df 6329
808f4dfe
DM
6330static void
6331test_sub_svalue_folding ()
6332{
6333 coord_test ct;
6334 tree c = build_global_decl ("c", ct.m_coord_type);
6335 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6336 c, ct.m_x_field, NULL_TREE);
757bf1df 6337
808f4dfe
DM
6338 region_model_manager mgr;
6339 region_model model (&mgr);
6340 test_region_model_context ctxt;
6341 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 6342
808f4dfe
DM
6343 /* Verify that sub_svalue of "unknown" simply
6344 yields an unknown. */
757bf1df 6345
808f4dfe
DM
6346 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6347 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6348 unknown, c_x_reg);
6349 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6350 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
757bf1df
DM
6351}
6352
f09b9955
DM
6353/* Get BIT within VAL as a symbolic value within MGR. */
6354
6355static const svalue *
6356get_bit (region_model_manager *mgr,
6357 bit_offset_t bit,
6358 unsigned HOST_WIDE_INT val)
6359{
6360 const svalue *inner_svalue
6361 = mgr->get_or_create_int_cst (unsigned_type_node, val);
6362 return mgr->get_or_create_bits_within (boolean_type_node,
6363 bit_range (bit, 1),
6364 inner_svalue);
6365}
6366
6367/* Verify that bits_within_svalues are folded as expected. */
6368
6369static void
6370test_bits_within_svalue_folding ()
6371{
6372 region_model_manager mgr;
6373
6374 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
6375 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
6376
6377 {
6378 const unsigned val = 0x0000;
6379 for (unsigned bit = 0; bit < 16; bit++)
6380 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6381 }
6382
6383 {
6384 const unsigned val = 0x0001;
6385 ASSERT_EQ (get_bit (&mgr, 0, val), one);
6386 for (unsigned bit = 1; bit < 16; bit++)
6387 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6388 }
6389
6390 {
6391 const unsigned val = 0x8000;
6392 for (unsigned bit = 0; bit < 15; bit++)
6393 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6394 ASSERT_EQ (get_bit (&mgr, 15, val), one);
6395 }
6396
6397 {
6398 const unsigned val = 0xFFFF;
6399 for (unsigned bit = 0; bit < 16; bit++)
6400 ASSERT_EQ (get_bit (&mgr, bit, val), one);
6401 }
6402}
6403
808f4dfe 6404/* Test that region::descendent_of_p works as expected. */
757bf1df
DM
6405
6406static void
808f4dfe 6407test_descendent_of_p ()
757bf1df 6408{
808f4dfe
DM
6409 region_model_manager mgr;
6410 const region *stack = mgr.get_stack_region ();
6411 const region *heap = mgr.get_heap_region ();
6412 const region *code = mgr.get_code_region ();
6413 const region *globals = mgr.get_globals_region ();
757bf1df 6414
808f4dfe
DM
6415 /* descendent_of_p should return true when used on the region itself. */
6416 ASSERT_TRUE (stack->descendent_of_p (stack));
6417 ASSERT_FALSE (stack->descendent_of_p (heap));
6418 ASSERT_FALSE (stack->descendent_of_p (code));
6419 ASSERT_FALSE (stack->descendent_of_p (globals));
757bf1df 6420
808f4dfe
DM
6421 tree x = build_global_decl ("x", integer_type_node);
6422 const region *x_reg = mgr.get_region_for_global (x);
6423 ASSERT_TRUE (x_reg->descendent_of_p (globals));
757bf1df 6424
808f4dfe
DM
6425 /* A cast_region should be a descendent of the original region. */
6426 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6427 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
757bf1df
DM
6428}
6429
391512ad
DM
6430/* Verify that bit_range_region works as expected. */
6431
6432static void
6433test_bit_range_regions ()
6434{
6435 tree x = build_global_decl ("x", integer_type_node);
6436 region_model_manager mgr;
6437 const region *x_reg = mgr.get_region_for_global (x);
6438 const region *byte0
6439 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6440 const region *byte1
6441 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
6442 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6443 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6444 ASSERT_NE (byte0, byte1);
6445}
6446
757bf1df
DM
6447/* Verify that simple assignments work as expected. */
6448
6449static void
6450test_assignment ()
6451{
6452 tree int_0 = build_int_cst (integer_type_node, 0);
6453 tree x = build_global_decl ("x", integer_type_node);
6454 tree y = build_global_decl ("y", integer_type_node);
6455
6456 /* "x == 0", then use of y, then "y = 0;". */
808f4dfe
DM
6457 region_model_manager mgr;
6458 region_model model (&mgr);
757bf1df
DM
6459 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6460 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
6461 model.set_value (model.get_lvalue (y, NULL),
6462 model.get_rvalue (int_0, NULL),
6463 NULL);
6464 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6465 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
757bf1df
DM
6466}
6467
a96f1c38
DM
6468/* Verify that compound assignments work as expected. */
6469
6470static void
6471test_compound_assignment ()
6472{
6473 coord_test ct;
6474
6475 tree c = build_global_decl ("c", ct.m_coord_type);
6476 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6477 c, ct.m_x_field, NULL_TREE);
6478 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6479 c, ct.m_y_field, NULL_TREE);
6480 tree d = build_global_decl ("d", ct.m_coord_type);
6481 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6482 d, ct.m_x_field, NULL_TREE);
6483 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6484 d, ct.m_y_field, NULL_TREE);
6485
6486 tree int_17 = build_int_cst (integer_type_node, 17);
6487 tree int_m3 = build_int_cst (integer_type_node, -3);
6488
808f4dfe
DM
6489 region_model_manager mgr;
6490 region_model model (&mgr);
a96f1c38
DM
6491 model.set_value (c_x, int_17, NULL);
6492 model.set_value (c_y, int_m3, NULL);
6493
a96f1c38 6494 /* Copy c to d. */
13ad6d9f
DM
6495 const svalue *sval = model.get_rvalue (c, NULL);
6496 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6497
a96f1c38
DM
6498 /* Check that the fields have the same svalues. */
6499 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6500 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6501}
6502
757bf1df
DM
6503/* Verify the details of pushing and popping stack frames. */
6504
6505static void
6506test_stack_frames ()
6507{
6508 tree int_42 = build_int_cst (integer_type_node, 42);
6509 tree int_10 = build_int_cst (integer_type_node, 10);
6510 tree int_5 = build_int_cst (integer_type_node, 5);
6511 tree int_0 = build_int_cst (integer_type_node, 0);
6512
6513 auto_vec <tree> param_types;
6514 tree parent_fndecl = make_fndecl (integer_type_node,
6515 "parent_fn",
6516 param_types);
6517 allocate_struct_function (parent_fndecl, true);
6518
6519 tree child_fndecl = make_fndecl (integer_type_node,
6520 "child_fn",
6521 param_types);
6522 allocate_struct_function (child_fndecl, true);
6523
6524 /* "a" and "b" in the parent frame. */
6525 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6526 get_identifier ("a"),
6527 integer_type_node);
4cebae09 6528 DECL_CONTEXT (a) = parent_fndecl;
757bf1df
DM
6529 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6530 get_identifier ("b"),
6531 integer_type_node);
4cebae09 6532 DECL_CONTEXT (b) = parent_fndecl;
757bf1df
DM
6533 /* "x" and "y" in a child frame. */
6534 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6535 get_identifier ("x"),
6536 integer_type_node);
4cebae09 6537 DECL_CONTEXT (x) = child_fndecl;
757bf1df
DM
6538 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6539 get_identifier ("y"),
6540 integer_type_node);
4cebae09 6541 DECL_CONTEXT (y) = child_fndecl;
757bf1df
DM
6542
6543 /* "p" global. */
6544 tree p = build_global_decl ("p", ptr_type_node);
6545
6546 /* "q" global. */
6547 tree q = build_global_decl ("q", ptr_type_node);
6548
808f4dfe 6549 region_model_manager mgr;
757bf1df 6550 test_region_model_context ctxt;
808f4dfe 6551 region_model model (&mgr);
757bf1df
DM
6552
6553 /* Push stack frame for "parent_fn". */
808f4dfe
DM
6554 const region *parent_frame_reg
6555 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6556 NULL, &ctxt);
6557 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6558 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6559 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6560 model.set_value (a_in_parent_reg,
6561 model.get_rvalue (int_42, &ctxt),
6562 &ctxt);
6563 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6564
757bf1df
DM
6565 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6566 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6567 tristate (tristate::TS_TRUE));
6568
6569 /* Push stack frame for "child_fn". */
808f4dfe 6570 const region *child_frame_reg
757bf1df 6571 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
6572 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6573 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6574 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6575 model.set_value (x_in_child_reg,
6576 model.get_rvalue (int_0, &ctxt),
6577 &ctxt);
6578 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6579
757bf1df
DM
6580 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6581 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6582 tristate (tristate::TS_TRUE));
6583
6584 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
6585 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6586 model.set_value (p_in_globals_reg,
6587 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 6588 &ctxt);
808f4dfe 6589 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
6590
6591 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
6592 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6593 model.set_value (q_in_globals_reg,
6594 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
6595 &ctxt);
6596
808f4dfe
DM
6597 /* Test region::descendent_of_p. */
6598 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6599 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6600 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
6601
6602 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
6603 model.pop_frame (NULL, NULL, &ctxt);
6604 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6605 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
6606
6607 /* Verify that p (which was pointing at the local "x" in the popped
6608 frame) has been poisoned. */
33255ad3 6609 const svalue *new_p_sval = model.get_rvalue (p, NULL);
757bf1df
DM
6610 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6611 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6612 POISON_KIND_POPPED_STACK);
6613
6614 /* Verify that q still points to p, in spite of the region
6615 renumbering. */
808f4dfe 6616 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 6617 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 6618 ASSERT_EQ (new_q_sval->maybe_get_region (),
757bf1df
DM
6619 model.get_lvalue (p, &ctxt));
6620
6621 /* Verify that top of stack has been updated. */
808f4dfe 6622 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
6623
6624 /* Verify locals in parent frame. */
6625 /* Verify "a" still has its value. */
808f4dfe 6626 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
6627 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6628 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6629 int_42);
6630 /* Verify "b" still has its constraint. */
6631 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6632 tristate (tristate::TS_TRUE));
6633}
6634
6635/* Verify that get_representative_path_var works as expected, that
808f4dfe 6636 we can map from regions to parms and back within a recursive call
757bf1df
DM
6637 stack. */
6638
6639static void
6640test_get_representative_path_var ()
6641{
6642 auto_vec <tree> param_types;
6643 tree fndecl = make_fndecl (integer_type_node,
6644 "factorial",
6645 param_types);
6646 allocate_struct_function (fndecl, true);
6647
6648 /* Parm "n". */
6649 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6650 get_identifier ("n"),
6651 integer_type_node);
4cebae09 6652 DECL_CONTEXT (n) = fndecl;
757bf1df 6653
808f4dfe
DM
6654 region_model_manager mgr;
6655 test_region_model_context ctxt;
6656 region_model model (&mgr);
757bf1df
DM
6657
6658 /* Push 5 stack frames for "factorial", each with a param. */
808f4dfe
DM
6659 auto_vec<const region *> parm_regs;
6660 auto_vec<const svalue *> parm_svals;
757bf1df
DM
6661 for (int depth = 0; depth < 5; depth++)
6662 {
808f4dfe
DM
6663 const region *frame_n_reg
6664 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
6665 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
6666 parm_regs.safe_push (parm_n_reg);
757bf1df 6667
808f4dfe
DM
6668 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
6669 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
6670 parm_svals.safe_push (sval_n);
757bf1df
DM
6671 }
6672
6673 /* Verify that we can recognize that the regions are the parms,
6674 at every depth. */
6675 for (int depth = 0; depth < 5; depth++)
6676 {
808f4dfe
DM
6677 {
6678 svalue_set visited;
6679 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
6680 &visited),
6681 path_var (n, depth + 1));
6682 }
757bf1df
DM
6683 /* ...and that we can lookup lvalues for locals for all frames,
6684 not just the top. */
6685 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
808f4dfe 6686 parm_regs[depth]);
757bf1df 6687 /* ...and that we can locate the svalues. */
808f4dfe
DM
6688 {
6689 svalue_set visited;
6690 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
6691 &visited),
6692 path_var (n, depth + 1));
6693 }
757bf1df
DM
6694 }
6695}
6696
808f4dfe 6697/* Ensure that region_model::operator== works as expected. */
757bf1df
DM
6698
6699static void
808f4dfe 6700test_equality_1 ()
757bf1df 6701{
808f4dfe
DM
6702 tree int_42 = build_int_cst (integer_type_node, 42);
6703 tree int_17 = build_int_cst (integer_type_node, 17);
757bf1df 6704
808f4dfe
DM
6705 /* Verify that "empty" region_model instances are equal to each other. */
6706 region_model_manager mgr;
6707 region_model model0 (&mgr);
6708 region_model model1 (&mgr);
757bf1df 6709 ASSERT_EQ (model0, model1);
808f4dfe
DM
6710
6711 /* Verify that setting state in model0 makes the models non-equal. */
6712 tree x = build_global_decl ("x", integer_type_node);
6713 model0.set_value (x, int_42, NULL);
6714 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6715 ASSERT_NE (model0, model1);
6716
6717 /* Verify the copy-ctor. */
6718 region_model model2 (model0);
6719 ASSERT_EQ (model0, model2);
6720 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6721 ASSERT_NE (model1, model2);
6722
6723 /* Verify that models obtained from copy-ctor are independently editable
6724 w/o affecting the original model. */
6725 model2.set_value (x, int_17, NULL);
6726 ASSERT_NE (model0, model2);
6727 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
6728 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
757bf1df
DM
6729}

/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal.  */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
		    model0.get_rvalue (int_42, NULL),
		    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
		    model0.get_rvalue (int_113, NULL),
		    NULL);

  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
		    model1.get_rvalue (int_113, NULL),
		    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
		    model1.get_rvalue (int_42, NULL),
		    NULL);

  ASSERT_EQ (model0, model1);
}

/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization.  */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}

/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}

/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR, that they are
   mergeable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B is NULL_TREE, don't populate EXPR
   for that region_model.  */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}

/* Verify that we can merge region_model instances.  */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
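    /* Effectively "p = malloc (1024);".  */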
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

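  /* Verify that we can merge models with contradictory constraints on
     "x" (x == 42 vs x != 42).  */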
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

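  /* As above, but with an additional constraint on "x" in model1
     (x == 113).  */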
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances.  */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
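  /* A widening svalue for a value that was 0 and was then seen to be 1,
     i.e. is ascending from 0 (per the DIR_ASCENDING assertion below).  */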
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
     i_11 = PHI <i_15(2), i_23(3)>
     if (i_11 <= 255)
       goto <bb 3>;
     else
       goto [AFTER LOOP]

    <bb 3> :
     [LOOP BODY]
     i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0} [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
       [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
       T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
       F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

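  /* Initially, the nullness of both "p" and "q" is unknown.  */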
  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

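  /* Marking "p" as non-NULL should also make the cast pointer "q"
     known to be non-NULL.  */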
  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

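/* Smoketest of getting and setting array elements, with both concrete
   and symbolic indices, and of get_offset on the resulting regions.  */
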
static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]".  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}
7574/* Smoketest of dereferencing a pointer via MEM_REF. */
7575
7576static void
7577test_mem_ref ()
7578{
7579 /*
7580 x = 17;
7581 p = &x;
7582 *p;
7583 */
7584 tree x = build_global_decl ("x", integer_type_node);
7585 tree int_star = build_pointer_type (integer_type_node);
7586 tree p = build_global_decl ("p", int_star);
7587
7588 tree int_17 = build_int_cst (integer_type_node, 17);
7589 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
7590 tree offset_0 = build_int_cst (integer_type_node, 0);
7591 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
7592
7593 region_model_manager mgr;
7594 region_model model (&mgr);
7595
7596 /* "x = 17;". */
7597 model.set_value (x, int_17, NULL);
7598
7599 /* "p = &x;". */
7600 model.set_value (p, addr_of_x, NULL);
7601
7602 const svalue *sval = model.get_rvalue (star_p, NULL);
7603 ASSERT_EQ (sval->maybe_get_constant (), int_17);
7604}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;  # POINTER_PLUS_EXPR
       int _2 = *_1;            # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;  # POINTER_PLUS_EXPR
       *_5 = 42;                # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;  # POINTER_PLUS_EXPR
       int _7 = *_6;            # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
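  /* The capacity of the heap-allocated region should be the size
     that was requested.  */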
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

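  /* Symbolic regions for "*p" and "*q", and their initial values.  */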
  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */