/* Classes for modeling the state of memory.
   Copyright (C) 2019-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"
#include "analyzer/checker-event.h"
#include "analyzer/checker-path.h"

#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}

/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
        return false;
      if (sval != *other_slot)
        return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
                                 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (i > 0)
        pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
                                       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
        {
          if (iter_sval == *other_slot)
            out->put (iter_reg, iter_sval);
          else
            return false;
        }
    }
  return true;
}

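/* For example (an illustrative sketch, not from the original source):
   merging
     {r0: sval_a, r1: sval_b} with {r0: sval_a, r1: sval_c}
   is rejected, since r1 maps to different svalues, whereas merging
     {r0: sval_a, r1: sval_b} with {r0: sval_a}
   succeeds, writing the intersection {r0: sval_a} to OUT.  */
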
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
        to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
                          bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
        pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
                      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

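/* Usage note (an illustrative sketch, not from the original source):
   since region_model::operator== assumes canonical form, comparing two
   models is typically preceded by:
     model_a.canonicalize ();
     model_b.canonicalize ();
     if (model_a == model_b)
       ...
   with canonicalized_p available for asserting that invariant.  */
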
/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
                             const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
            && m_pkind == other.m_pkind
            && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
        return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
        return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        {
          diagnostic_metadata m;
          m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use of uninitialized value %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_FREED:
        {
          diagnostic_metadata m;
          m.add_cwe (416); /* "CWE-416: Use After Free".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use after %<free%> of %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_POPPED_STACK:
        {
          /* TODO: which CWE?  */
          return warning_at
            (rich_loc, get_controlling_option (),
             "dereferencing pointer %qE to within stale stack frame",
             m_expr);
        }
        break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return ev.formatted_print ("use of uninitialized value %qE here",
                                   m_expr);
      case POISON_KIND_FREED:
        return ev.formatted_print ("use after %<free%> of %qE here",
                                   m_expr);
      case POISON_KIND_POPPED_STACK:
        return ev.formatted_print
          ("dereferencing pointer %qE to within stale stack frame",
           m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};

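/* For example (an illustrative sketch, not from the original source):
   with -fanalyzer, a function such as:
     int f (void)
     {
       int x;
       return x;
     }
   reads "x" while uninitialized, which would be reported via this
   diagnostic with POISON_KIND_UNINIT, under
   -Wanalyzer-use-of-uninitialized-value.  */
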
/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
                               m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
                                   int operand_precision,
                                   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && m_operand_precision == other.m_operand_precision
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by count (%qE) >= precision of type (%qi)",
                       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};

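/* Illustrative examples (not from the original source) of shifts the
   two diagnostics above would complain about, assuming a 32-bit "int"
   (precision 32):
     y = x << -1;   // shift_count_negative_diagnostic
     y = x << 32;   // shift_count_overflow_diagnostic
   Both are flagged only when the shift count is a constant; see the
   LSHIFT_EXPR/RSHIFT_EXPR handling in get_gassign_result below.  */
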
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
                                  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
        /* e.g. "_1 = a_10(D) + 12;" */
        tree ptr = rhs1;
        tree offset = gimple_assign_rhs2 (assign);

        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
           is an integer of type sizetype".  */
        offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
        /* e.g. "_1 = p_2(D) - q_3(D);".  */
        tree rhs2 = gimple_assign_rhs2 (assign);
        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        // TODO: perhaps fold to zero if they're known to be equal?

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    /* Assignments of the form
         set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        /* Unary ops.  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
        return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (TREE_TYPE (lhs) == boolean_type_node)
          {
            /* Consider constraints between svalues.  */
            tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
            if (t.is_known ())
              return m_mgr->get_or_create_constant_svalue
                (t.is_true () ? boolean_true_node : boolean_false_node);
          }

        /* Otherwise, generate a symbolic binary op.  */
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
        /* Binary ops.  */
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
          {
            /* "INT34-C. Do not shift an expression by a negative number of bits
               or by greater than or equal to the number of bits that exist in
               the operand."  */
            if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
              if (TREE_CODE (rhs2_cst) == INTEGER_CST)
                {
                  if (tree_int_cst_sgn (rhs2_cst) < 0)
                    ctxt->warn
                      (make_unique<shift_count_negative_diagnostic>
                         (assign, rhs2_cst));
                  else if (compare_tree_int (rhs2_cst,
                                             TYPE_PRECISION (TREE_TYPE (rhs1)))
                           >= 0)
                    ctxt->warn
                      (make_unique<shift_count_overflow_diagnostic>
                         (assign,
                          int (TYPE_PRECISION (TREE_TYPE (rhs1))),
                          rhs2_cst));
                }
          }

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
                               const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
        && gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
        return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
        return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
                                tree expr,
                                const region *src_region,
                                region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
         to initialize.  */
      if (pkind == POISON_KIND_UNINIT
          && sval->get_type ()
          && is_empty_type (sval->get_type ()))
        return sval;

      if (pkind == POISON_KIND_UNINIT)
        if (const gimple *curr_stmt = ctxt->get_stmt ())
          if (const gassign *assign_stmt
                = dyn_cast <const gassign *> (curr_stmt))
            {
              /* Special case to avoid certain false positives.  */
              if (within_short_circuited_stmt_p (this, assign_stmt))
                return sval;

              /* Special case to avoid false positive on
                 -ftrivial-auto-var-init=.  */
              if (due_to_ifn_deferred_init_p (assign_stmt))
                return sval;
            }

      /* If we have an SSA name for a temporary, we don't want to print
         '<unknown>'.
         Poisoned values are shared by type, and so we can't reconstruct
         the tree other than via the def stmts, using
         fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
        src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
                                                              pkind,
                                                              src_region)))
        {
          /* We only want to report use of a poisoned value at the first
             place it gets used; return an unknown value to avoid generating
             a chain of followup warnings.  */
          sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
        }

      return sval;
    }

  return sval;
}

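/* For example (an illustrative sketch, not from the original source):
   given
     int i;
     int j = i;
   the rvalue for "i" is a poisoned_svalue with POISON_KIND_UNINIT;
   check_for_poison warns once, then returns an unknown svalue, so that
   later uses of "j" don't emit a cascade of followup warnings.  */
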
/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
        expr = decl;
      else
        return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
        if (0)
          sorry_at (assign->location, "unhandled assignment op: %qs",
                    get_tree_code_name (op));
        const svalue *unknown_sval
          = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
        set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
        if (TREE_CLOBBER_P (rhs1))
          {
            /* e.g. "x ={v} {CLOBBER};"  */
            clobber_region (lhs_reg);
          }
        else
          {
            /* Any CONSTRUCTOR that survives to this point is either
               just a zero-init of everything, or a vector.  */
            if (!CONSTRUCTOR_NO_CLEARING (rhs1))
              zero_fill_region (lhs_reg);
            unsigned ix;
            tree index;
            tree val;
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
              {
                gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
                if (!index)
                  index = build_int_cst (integer_type_node, ix);
                gcc_assert (TREE_CODE (index) == INTEGER_CST);
                const svalue *index_sval
                  = m_mgr->get_or_create_constant_svalue (index);
                gcc_assert (index_sval);
                const region *sub_reg
                  = m_mgr->get_element_region (lhs_reg,
                                               TREE_TYPE (val),
                                               index_sval);
                const svalue *val_sval = get_rvalue (val, ctxt);
                set_value (sub_reg, val_sval, ctxt);
              }
          }
      }
      break;

    case STRING_CST:
      {
        /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
                           ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
                           bool *out_unknown_side_effects,
                           region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
        const gassign *assign = as_a <const gassign *> (stmt);
        on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
        const gasm *asm_stmt = as_a <const gasm *> (stmt);
        on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
        /* Track whether we have a gcall to a function that's not recognized by
           anything, for which we don't have a function body, or for which we
           don't know the fndecl.  */
        const gcall *call = as_a <const gcall *> (stmt);
        *out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
        const greturn *return_ = as_a <const greturn *> (stmt);
        on_return (return_, ctxt);
      }
      break;
    }
}

/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);
}

/* Return true if CD is known to be a call to a function with
   __attribute__((const)).  */

static bool
const_fn_p (const call_details &cd)
{
  tree fndecl = cd.get_fndecl_for_call ();
  if (!fndecl)
    return false;
  gcc_assert (DECL_P (fndecl));
  return TREE_READONLY (fndecl);
}

/* If this CD is known to be a call to a function with
   __attribute__((const)), attempt to get a const_fn_result_svalue
   based on the arguments, or return NULL otherwise.  */

static const svalue *
maybe_get_const_fn_result (const call_details &cd)
{
  if (!const_fn_p (cd))
    return NULL;

  unsigned num_args = cd.num_args ();
  if (num_args > const_fn_result_svalue::MAX_INPUTS)
    /* Too many arguments.  */
    return NULL;

  auto_vec<const svalue *> inputs (num_args);
  for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
    {
      const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
      if (!arg_sval->can_have_associated_state_p ())
        return NULL;
      inputs.quick_push (arg_sval);
    }

  region_model_manager *mgr = cd.get_manager ();
  const svalue *sval
    = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
                                                 cd.get_fndecl_for_call (),
                                                 inputs);
  return sval;
}

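/* For example (an illustrative sketch, not from the original source):
   given
     extern int sq (int) __attribute__ ((const));
     int a = sq (n);
     int b = sq (n);
   both calls have the same fndecl and the same inputs, so both yield
   the same const_fn_result_svalue, allowing the analyzer to treat "a"
   and "b" as equal.  */
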
/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
                                         int retval,
                                         bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
                                      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}

/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
                            region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
        {
          return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
          /* TODO: we might want to also capture the constraint
             when recording the diagnostic, or note that we're using
             the upper bound.  */
        }
  return NULL;
}

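/* For example (an illustrative sketch of the kernel idiom mentioned
   above):
     len = min_t (unsigned long, len, sizeof (buf));
     n = copy_to_user (p, buf, len);
   here the size is a MIN_EXPR of a symbolic svalue and the constant
   sizeof (buf), so the function above simplifies the bound to that
   constant.  */
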
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
                                     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
        = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}

/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false;

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
        {
          if (callee_fndecl
              && lookup_attribute ("malloc", DECL_ATTRIBUTES (callee_fndecl)))
            {
              const region *new_reg
                = get_or_create_region_for_heap_alloc (NULL, ctxt);
              mark_region_as_unknown (new_reg, NULL);
              sval = m_mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
            }
          else
            /* For the common case of functions without __attribute__((const)),
               use a conjured value, and purge any prior state involving that
               value (in case this is in a loop).  */
            sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
                                                         lhs_region,
                                                         conjured_purge (this,
                                                                         ctxt));
        }
      set_value (lhs_region, sval, ctxt);
    }

  if (gimple_call_internal_p (call))
    if (const known_function *kf
          = get_known_function (gimple_call_internal_fn (call)))
      {
        kf->impl_call_pre (cd);
        return false;
      }

  if (callee_fndecl)
    {
      int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);

      if (const known_function *kf = get_known_function (callee_fndecl, cd))
        {
          kf->impl_call_pre (cd);
          return false;
        }
      else if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
               && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
        {
          if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
            unknown_side_effects = true;
        }
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
               && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
               && !fndecl_built_in_p (callee_fndecl))
        unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  return unknown_side_effects;
}

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
                            bool unknown_side_effects,
                            region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
        {
          kf->impl_call_post (cd);
          return;
        }
      /* Was this fndecl referenced by
         __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
        {
          impl_deallocation_call (cd);
          return;
        }
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}

/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
                                     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}

/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
            "parameter %i of %qD marked with attribute %qs",
            m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
            && m_ptr_argno == other.m_ptr_argno
            && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};

/* Check CALL, a call to external function CALLEE_FNDECL, based on
   any __attribute__ ((access, ....)) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */

void
region_model::
check_external_function_for_access_attr (const gcall *call,
                                         tree callee_fndecl,
                                         region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
        continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
        continue;

      if (access->mode == access_write_only
          || access->mode == access_read_write)
        {
          /* Subclass of decorated_region_model_context that
             adds a note about the attr access to any saved diagnostics.  */
          class annotating_ctxt : public note_adding_context
          {
          public:
            annotating_ctxt (tree callee_fndecl,
                             const attr_access &access,
                             region_model_context *ctxt)
            : note_adding_context (ctxt),
              m_callee_fndecl (callee_fndecl),
              m_access (access)
            {
            }
            std::unique_ptr<pending_note> make_note () final override
            {
              return make_unique<reason_attr_access>
                (m_callee_fndecl, m_access);
            }
          private:
            tree m_callee_fndecl;
            const attr_access &m_access;
          };

          /* Use this ctxt below so that any diagnostics get the
             note added to them.  */
          annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

          tree ptr_tree = gimple_call_arg (call, access->ptrarg);
          const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
          const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
          check_region_for_write (reg, &my_ctxt);
          /* We don't use the size arg for now.  */
        }
    }
}

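/* For example (an illustrative sketch, not from the original source):
   given a declaration such as
     extern void fill_buf (char *p, size_t n)
       __attribute__ ((access (write_only, 1, 2)));
   a call "fill_buf (q, sz)" has the region pointed to by "q" checked
   via check_region_for_write above, with any resulting diagnostic
   annotated by a reason_attr_access note.  */
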
ef7827b0
DM
1696/* Handle a call CALL to a function with unknown behavior.
1697
1698 Traverse the regions in this model, determining what regions are
1699 reachable from pointer arguments to CALL and from global variables,
1700 recursively.
1701
1702 Set all reachable regions to new unknown values and purge sm-state
1703 from their values, and from values that point to them. */
1704
1705void
1706region_model::handle_unrecognized_call (const gcall *call,
1707 region_model_context *ctxt)
1708{
1709 tree fndecl = get_fndecl_for_call (call, ctxt);
1710
b6eaf90c
DM
1711 if (fndecl && ctxt)
1712 check_external_function_for_access_attr (call, fndecl, ctxt);
1713
c710051a 1714 reachable_regions reachable_regs (this);
ef7827b0
DM
1715
1716 /* Determine the reachable regions and their mutability. */
1717 {
808f4dfe
DM
1718 /* Add globals and regions that already escaped in previous
1719 unknown calls. */
1720 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1721 &reachable_regs);
ef7827b0
DM
1722
1723 /* Params that are pointers. */
1724 tree iter_param_types = NULL_TREE;
1725 if (fndecl)
1726 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1727 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1728 {
1729 /* Track expected param type, where available. */
1730 tree param_type = NULL_TREE;
1731 if (iter_param_types)
1732 {
1733 param_type = TREE_VALUE (iter_param_types);
1734 gcc_assert (param_type);
1735 iter_param_types = TREE_CHAIN (iter_param_types);
1736 }
1737
1738 tree parm = gimple_call_arg (call, arg_idx);
808f4dfe
DM
1739 const svalue *parm_sval = get_rvalue (parm, ctxt);
1740 reachable_regs.handle_parm (parm_sval, param_type);
ef7827b0
DM
1741 }
1742 }
1743
33255ad3 1744 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
3a66c289 1745
808f4dfe
DM
1746 /* Purge sm-state for the svalues that were reachable,
1747 both in non-mutable and mutable form. */
1748 for (svalue_set::iterator iter
1749 = reachable_regs.begin_reachable_svals ();
1750 iter != reachable_regs.end_reachable_svals (); ++iter)
1751 {
1752 const svalue *sval = (*iter);
1753 if (ctxt)
1754 ctxt->on_unknown_change (sval, false);
1755 }
1756 for (svalue_set::iterator iter
1757 = reachable_regs.begin_mutable_svals ();
1758 iter != reachable_regs.end_mutable_svals (); ++iter)
1759 {
1760 const svalue *sval = (*iter);
1761 if (ctxt)
1762 ctxt->on_unknown_change (sval, true);
1763 if (uncertainty)
1764 uncertainty->on_mutable_sval_at_unknown_call (sval);
1765 }
1766
1767 /* Mark any clusters that have escaped. */
1768 reachable_regs.mark_escaped_clusters (ctxt);
1769
1770 /* Update bindings for all clusters that have escaped, whether above,
1771 or previously. */
1772 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
1773 conjured_purge (this, ctxt));
1774
1775 /* Purge dynamic extents from any regions that have escaped mutably:
1776 realloc could have been called on them. */
1777 for (hash_set<const region *>::iterator
1778 iter = reachable_regs.begin_mutable_base_regs ();
1779 iter != reachable_regs.end_mutable_base_regs ();
1780 ++iter)
1781 {
1782 const region *base_reg = (*iter);
1783 unset_dynamic_extents (base_reg);
1784 }
1785}
1786
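/* Illustrative sketch (editorial addition, not part of the original source):
   given analyzed C code such as

     int g;
     void test (int *p)
     {
       g = 42;
       unknown_fn (p);   // no body available to the analyzer

   the call to unknown_fn could legitimately write to g (a non-static
   global) and to *p (reachable via the argument), so after this
   function runs both bindings are replaced by unknown svalues and
   their sm-state is purged.  */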
1787/* Traverse the regions in this model, determining what regions are
1788 reachable from the store and populating *OUT.
1789
1790 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1791 for reachability (for handling return values from functions when
1792 analyzing return of the only function on the stack).
1793
1794 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1795 within it as being maybe-bound as additional "roots" for reachability.
1796
1797 Find svalues that haven't leaked. */
1798
1799void
1800region_model::get_reachable_svalues (svalue_set *out,
1801 const svalue *extra_sval,
1802 const uncertainty_t *uncertainty)
808f4dfe 1803{
1804 reachable_regions reachable_regs (this);
1805
1806 /* Add globals and regions that already escaped in previous
1807 unknown calls. */
1808 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1809 &reachable_regs);
1810
1811 if (extra_sval)
1812 reachable_regs.handle_sval (extra_sval);
1813
1814 if (uncertainty)
1815 for (uncertainty_t::iterator iter
1816 = uncertainty->begin_maybe_bound_svals ();
1817 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1818 reachable_regs.handle_sval (*iter);
1819
1820 /* Get regions for locals that have explicitly bound values. */
1821 for (store::cluster_map_t::iterator iter = m_store.begin ();
1822 iter != m_store.end (); ++iter)
1823 {
1824 const region *base_reg = (*iter).first;
1825 if (const region *parent = base_reg->get_parent_region ())
1826 if (parent->get_kind () == RK_FRAME)
1827 reachable_regs.add (base_reg, false);
1828 }
1829
1830 /* Populate *OUT based on the values that were reachable. */
1831 for (svalue_set::iterator iter
1832 = reachable_regs.begin_reachable_svals ();
1833 iter != reachable_regs.end_reachable_svals (); ++iter)
1834 out->add (*iter);
1835}
1836
1837/* Update this model for the RETURN_STMT, using CTXT to report any
1838 diagnostics. */
1839
1840void
1841region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1842{
1843 tree callee = get_current_function ()->decl;
1844 tree lhs = DECL_RESULT (callee);
1845 tree rhs = gimple_return_retval (return_stmt);
1846
1847 if (lhs && rhs)
1848 {
1849 const svalue *sval = get_rvalue (rhs, ctxt);
1850 const region *ret_reg = get_lvalue (lhs, ctxt);
1851 set_value (ret_reg, sval, ctxt);
1852 }
1853}
1854
1855/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1856 ENODE, using CTXT to report any diagnostics.
1857
1858 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1859 0), as opposed to any second return due to longjmp/siglongjmp. */
1860
1861void
1862region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1863 region_model_context *ctxt)
1864{
1865 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1866 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1867 ctxt);
1868
1869 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1870 region. */
1871 if (buf_reg)
1872 {
1873 setjmp_record r (enode, call);
1874 const svalue *sval
1875 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1876 set_value (buf_reg, sval, ctxt);
1877 }
1878
1879 /* Direct calls to setjmp return 0. */
1880 if (tree lhs = gimple_call_lhs (call))
1881 {
1882 const svalue *new_sval
1883 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
1884 const region *lhs_reg = get_lvalue (lhs, ctxt);
1885 set_value (lhs_reg, new_sval, ctxt);
1886 }
1887}
1888
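/* Illustrative sketch (editorial addition, not part of the original source):
   for analyzed C code such as

     jmp_buf env;
     void test (void)
     {
       int i = setjmp (env);
       ...

   this function binds a setjmp_svalue recording the exploded_node of
   the call into the region for "env", and sets "i" to 0, the value of
   the initial direct return.  */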
1889/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1890 to a "setjmp" at SETJMP_CALL where the final stack depth should be
1891 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1892 done, and should be done by the caller. */
1893
1894void
1895region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
1896 int setjmp_stack_depth, region_model_context *ctxt)
1897{
1898 /* Evaluate the val, using the frame of the "longjmp". */
1899 tree fake_retval = gimple_call_arg (longjmp_call, 1);
1900 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
1901
1902 /* Pop any frames until we reach the stack depth of the function where
1903 setjmp was called. */
1904 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1905 while (get_stack_depth () > setjmp_stack_depth)
1906 pop_frame (NULL, NULL, ctxt);
1907
1908 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1909
1910 /* Assign to LHS of "setjmp" in new_state. */
1911 if (tree lhs = gimple_call_lhs (setjmp_call))
1912 {
1913 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1914 const svalue *zero_sval
1915 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
1916 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
1917 /* If we have 0, use 1. */
1918 if (eq_zero.is_true ())
1919 {
1920 const svalue *one_sval
1921 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
1922 fake_retval_sval = one_sval;
1923 }
1924 else
1925 {
1926 /* Otherwise note that the value is nonzero. */
1927 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
1928 }
1929
1930 /* Decorate the return value from setjmp as being unmergeable,
1931 so that we don't attempt to merge states with it as zero
1932 with states in which it's nonzero, leading to a clean distinction
1933 in the exploded_graph between the first return and the second
1934 return. */
1935 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
1936
1937 const region *lhs_reg = get_lvalue (lhs, ctxt);
1938 set_value (lhs_reg, fake_retval_sval, ctxt);
1939 }
1940}
1941
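/* Illustrative sketch (editorial addition, not part of the original source):
   the val->retval rule above mirrors the C semantics of longjmp:

     longjmp (env, 0);   // setjmp (env) appears to return 1
     longjmp (env, 5);   // setjmp (env) appears to return 5 (nonzero)

   hence the special-casing of a zero FAKE_RETVAL, and the NE_EXPR
   constraint for the symbolic case.  */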
1942/* Update this region_model for a phi stmt of the form
1943 LHS = PHI <...RHS...>.
1944 where RHS is for the appropriate edge.
1945 Get state from OLD_STATE so that all of the phi stmts for a basic block
1946 are effectively handled simultaneously. */
1947
1948void
1949 region_model::handle_phi (const gphi *phi,
1950 tree lhs, tree rhs,
1951 const region_model &old_state,
1952 region_model_context *ctxt)
1953{
1954 /* For now, don't bother tracking the .MEM SSA names. */
1955 if (tree var = SSA_NAME_VAR (lhs))
1956 if (TREE_CODE (var) == VAR_DECL)
1957 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
1958 return;
1959
1960 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
1961 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
1962
1963 set_value (dst_reg, src_sval, ctxt);
1964
1965 if (ctxt)
1966 ctxt->on_phi (phi, rhs);
1967}
1968
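/* Illustrative sketch (editorial addition, not part of the original source):
   for gimple such as

     # x_5 = PHI <x_3(2), x_4(3)>

   this function is called with LHS x_5 and with RHS x_3 or x_4
   depending on which incoming CFG edge was taken; reading the RHS from
   OLD_STATE ensures that a group of phis at the start of a basic block
   all see the pre-phi values rather than each other's results.  */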
1969/* Implementation of region_model::get_lvalue; the latter adds type-checking.
1970
1971 Get the id of the region for PV within this region_model,
1972 emitting any diagnostics to CTXT. */
1973
1974const region *
1975 region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
1976{
1977 tree expr = pv.m_tree;
1978
1979 gcc_assert (expr);
1980
1981 switch (TREE_CODE (expr))
1982 {
1983 default:
1984 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
1985 dump_location_t ());
1986
1987 case ARRAY_REF:
1988 {
1989 tree array = TREE_OPERAND (expr, 0);
1990 tree index = TREE_OPERAND (expr, 1);
1991
1992 const region *array_reg = get_lvalue (array, ctxt);
1993 const svalue *index_sval = get_rvalue (index, ctxt);
1994 return m_mgr->get_element_region (array_reg,
1995 TREE_TYPE (TREE_TYPE (array)),
1996 index_sval);
1997 }
1998 break;
1999
2000 case BIT_FIELD_REF:
2001 {
2002 tree inner_expr = TREE_OPERAND (expr, 0);
2003 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2004 tree num_bits = TREE_OPERAND (expr, 1);
2005 tree first_bit_offset = TREE_OPERAND (expr, 2);
2006 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2007 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2008 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2009 TREE_INT_CST_LOW (num_bits));
2010 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2011 }
2012 break;
2013
2014 case MEM_REF:
2015 {
2016 tree ptr = TREE_OPERAND (expr, 0);
2017 tree offset = TREE_OPERAND (expr, 1);
2018 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2019 const svalue *offset_sval = get_rvalue (offset, ctxt);
2020 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2021 return m_mgr->get_offset_region (star_ptr,
2022 TREE_TYPE (expr),
2023 offset_sval);
2024 }
2025 break;
2026
2027 case FUNCTION_DECL:
2028 return m_mgr->get_region_for_fndecl (expr);
2029
2030 case LABEL_DECL:
2031 return m_mgr->get_region_for_label (expr);
2032
2033 case VAR_DECL:
2034 /* Handle globals. */
2035 if (is_global_var (expr))
2036 return m_mgr->get_region_for_global (expr);
2037
2038 /* Fall through. */
2039
2040 case SSA_NAME:
2041 case PARM_DECL:
2042 case RESULT_DECL:
2043 {
2044 gcc_assert (TREE_CODE (expr) == SSA_NAME
2045 || TREE_CODE (expr) == PARM_DECL
2046 || TREE_CODE (expr) == VAR_DECL
2047 || TREE_CODE (expr) == RESULT_DECL);
2048
2049 int stack_index = pv.m_stack_depth;
2050 const frame_region *frame = get_frame_at_index (stack_index);
2051 gcc_assert (frame);
2052 return frame->get_region_for_local (m_mgr, expr, ctxt);
2053 }
2054
2055 case COMPONENT_REF:
2056 {
2057 /* obj.field */
2058 tree obj = TREE_OPERAND (expr, 0);
2059 tree field = TREE_OPERAND (expr, 1);
2060 const region *obj_reg = get_lvalue (obj, ctxt);
2061 return m_mgr->get_field_region (obj_reg, field);
2062 }
2063 break;
2064
2065 case STRING_CST:
2066 return m_mgr->get_region_for_string (expr);
2067 }
2068}
2069
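/* Illustrative sketch (editorial addition, not part of the original source):
   for an analyzed C lvalue such as "a[i].f", get_lvalue_1 recurses
   through the tree codes above:

     COMPONENT_REF (field f)
       -> ARRAY_REF (index i)
            -> VAR_DECL (a)

   yielding a field_region within an element_region within the region
   for the decl "a".  */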
2070/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2071
2072static void
2073assert_compat_types (tree src_type, tree dst_type)
2074{
2075 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2076 {
2077#if CHECKING_P
2078 if (!(useless_type_conversion_p (src_type, dst_type)))
2079 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2080#endif
2081 }
2082}
2083
2084/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2085
2086bool
2087compat_types_p (tree src_type, tree dst_type)
2088{
2089 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2090 if (!(useless_type_conversion_p (src_type, dst_type)))
2091 return false;
2092 return true;
2093}
2094
2095/* Get the region for PV within this region_model,
2096 emitting any diagnostics to CTXT. */
2097
2098const region *
2099 region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
2100{
2101 if (pv.m_tree == NULL_TREE)
2102 return NULL;
2103
2104 const region *result_reg = get_lvalue_1 (pv, ctxt);
2105 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2106 return result_reg;
2107}
2108
2109/* Get the region for EXPR within this region_model (assuming the most
2110 recent stack frame if it's a local). */
2111
2112const region *
2113 region_model::get_lvalue (tree expr, region_model_context *ctxt) const
2114{
2115 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2116}
2117
2118/* Implementation of region_model::get_rvalue; the latter adds type-checking.
2119
2120 Get the value of PV within this region_model,
2121 emitting any diagnostics to CTXT. */
2122
2123const svalue *
2124 region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
2125{
2126 gcc_assert (pv.m_tree);
2127
2128 switch (TREE_CODE (pv.m_tree))
2129 {
2130 default:
2131 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
2132
2133 case ADDR_EXPR:
2134 {
2135 /* "&EXPR". */
2136 tree expr = pv.m_tree;
2137 tree op0 = TREE_OPERAND (expr, 0);
2138 const region *expr_reg = get_lvalue (op0, ctxt);
2139 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
2140 }
2141 break;
2142
2143 case BIT_FIELD_REF:
2144 {
2145 tree expr = pv.m_tree;
2146 tree op0 = TREE_OPERAND (expr, 0);
2147 const region *reg = get_lvalue (op0, ctxt);
2148 tree num_bits = TREE_OPERAND (expr, 1);
2149 tree first_bit_offset = TREE_OPERAND (expr, 2);
2150 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2151 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2152 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2153 TREE_INT_CST_LOW (num_bits));
2154 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
2155 }
2156
2157 case SSA_NAME:
2158 case VAR_DECL:
2159 case PARM_DECL:
2160 case RESULT_DECL:
2161 case ARRAY_REF:
2162 {
2163 const region *reg = get_lvalue (pv, ctxt);
2164 return get_store_value (reg, ctxt);
2165 }
2166
2167 case REALPART_EXPR:
2168 case IMAGPART_EXPR:
2169 case VIEW_CONVERT_EXPR:
2170 {
2171 tree expr = pv.m_tree;
2172 tree arg = TREE_OPERAND (expr, 0);
2173 const svalue *arg_sval = get_rvalue (arg, ctxt);
2174 const svalue *sval_unaryop
2175 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2176 arg_sval);
2177 return sval_unaryop;
2178 };
2179
2180 case INTEGER_CST:
2181 case REAL_CST:
2182 case COMPLEX_CST:
2183 case VECTOR_CST:
2184 case STRING_CST:
2185 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2186
2187 case POINTER_PLUS_EXPR:
2188 {
2189 tree expr = pv.m_tree;
2190 tree ptr = TREE_OPERAND (expr, 0);
2191 tree offset = TREE_OPERAND (expr, 1);
2192 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2193 const svalue *offset_sval = get_rvalue (offset, ctxt);
2194 const svalue *sval_binop
2195 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2196 ptr_sval, offset_sval);
2197 return sval_binop;
2198 }
2199
2200 /* Binary ops. */
2201 case PLUS_EXPR:
2202 case MULT_EXPR:
2203 {
2204 tree expr = pv.m_tree;
2205 tree arg0 = TREE_OPERAND (expr, 0);
2206 tree arg1 = TREE_OPERAND (expr, 1);
2207 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2208 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2209 const svalue *sval_binop
2210 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2211 arg0_sval, arg1_sval);
2212 return sval_binop;
2213 }
2214
2215 case COMPONENT_REF:
2216 case MEM_REF:
2217 {
2218 const region *ref_reg = get_lvalue (pv, ctxt);
2219 return get_store_value (ref_reg, ctxt);
2220 }
2221 case OBJ_TYPE_REF:
2222 {
2223 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2224 return get_rvalue (expr, ctxt);
2225 }
2226 }
2227}
2228
2229/* Get the value of PV within this region_model,
2230 emitting any diagnostics to CTXT. */
2231
2232const svalue *
2233 region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
2234{
2235 if (pv.m_tree == NULL_TREE)
2236 return NULL;
2237
2238 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
2239
2240 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2241
2242 result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);
2243
2244 return result_sval;
2245}
2246
2247/* Get the value of EXPR within this region_model (assuming the most
2248 recent stack frame if it's a local). */
2249
2250const svalue *
2251 region_model::get_rvalue (tree expr, region_model_context *ctxt) const
2252{
2253 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2254}
2255
2256/* Return true if this model is on a path with "main" as the entrypoint
2257 (as opposed to one in which we're merely analyzing a subset of the
2258 path through the code). */
2259
2260bool
2261region_model::called_from_main_p () const
2262{
2263 if (!m_current_frame)
2264 return false;
2265 /* Determine if the oldest stack frame in this model is for "main". */
2266 const frame_region *frame0 = get_frame_at_index (0);
2267 gcc_assert (frame0);
2268 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2269}
2270
2271/* Subroutine of region_model::get_store_value for when REG is (or is within)
2272 a global variable that hasn't been touched since the start of this path
2273 (or was implicitly touched due to a call to an unknown function). */
2274
2275const svalue *
2276region_model::get_initial_value_for_global (const region *reg) const
2277{
2278 /* Get the decl that REG is for (or is within). */
2279 const decl_region *base_reg
2280 = reg->get_base_region ()->dyn_cast_decl_region ();
2281 gcc_assert (base_reg);
2282 tree decl = base_reg->get_decl ();
2283
2284 /* Special-case: to avoid having to explicitly update all previously
2285 untracked globals when calling an unknown fn, they implicitly have
2286 an unknown value if an unknown call has occurred, unless this is
2287 static to-this-TU and hasn't escaped. Globals that have escaped
2288 are explicitly tracked, so we shouldn't hit this case for them. */
2289 if (m_store.called_unknown_fn_p ()
2290 && TREE_PUBLIC (decl)
2291 && !TREE_READONLY (decl))
2292 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2293
2294 /* If we are on a path from the entrypoint from "main" and we have a
2295 global decl defined in this TU that hasn't been touched yet, then
2296 the initial value of REG can be taken from the initialization value
2297 of the decl. */
2298 if (called_from_main_p () || TREE_READONLY (decl))
2299 {
2300 /* Attempt to get the initializer value for base_reg. */
2301 if (const svalue *base_reg_init
2302 = base_reg->get_svalue_for_initializer (m_mgr))
2303 {
2304 if (reg == base_reg)
2305 return base_reg_init;
2306 else
2307 {
2308 /* Get the value for REG within base_reg_init. */
2309 binding_cluster c (base_reg);
2310 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
2311 const svalue *sval
2312 = c.get_any_binding (m_mgr->get_store_manager (), reg);
2313 if (sval)
2314 {
2315 if (reg->get_type ())
2316 sval = m_mgr->get_or_create_cast (reg->get_type (),
2317 sval);
2318 return sval;
2319 }
2320 }
2321 }
2322 }
2323
2324 /* Otherwise, return INIT_VAL(REG). */
2325 return m_mgr->get_or_create_initial_value (reg);
2326}
2327
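/* Illustrative sketch (editorial addition, not part of the original source):
   given

     static int g = 42;
     int main (void) { return g; }

   a read of "g" on a path from "main", before any unknown call, yields
   the constant 42 via the initializer path above, whereas the same
   read in a function analyzed without a path from "main" (and with "g"
   non-const) falls back to INIT_VAL(g).  */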
2328/* Get a value for REG, looking it up in the store, or otherwise falling
2329 back to "initial" or "unknown" values.
2330 Use CTXT to report any warnings associated with reading from REG. */
2331
2332const svalue *
2333region_model::get_store_value (const region *reg,
2334 region_model_context *ctxt) const
2335{
2336 /* Getting the value of an empty region gives an unknown_svalue. */
2337 if (reg->empty_p ())
2338 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2339
2340 check_region_for_read (reg, ctxt);
2341
2342 /* Special-case: handle var_decls in the constant pool. */
2343 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2344 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2345 return sval;
2346
2347 const svalue *sval
2348 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2349 if (sval)
2350 {
2351 if (reg->get_type ())
2352 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2353 return sval;
2354 }
2355
2356 /* Special-case: read at a constant index within a STRING_CST. */
2357 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2358 if (tree byte_offset_cst
2359 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2360 if (const string_region *str_reg
2361 = reg->get_parent_region ()->dyn_cast_string_region ())
2362 {
2363 tree string_cst = str_reg->get_string_cst ();
2364 if (const svalue *char_sval
2365 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2366 byte_offset_cst))
2367 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
757bf1df 2368 }
757bf1df 2369
808f4dfe
DM
2370 /* Special-case: read the initial char of a STRING_CST. */
2371 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2372 if (const string_region *str_reg
2373 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2374 {
2375 tree string_cst = str_reg->get_string_cst ();
2376 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2377 if (const svalue *char_sval
2378 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2379 byte_offset_cst))
2380 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2381 }
2382
2383 /* Otherwise we implicitly have the initial value of the region
2384 (if the cluster had been touched, binding_cluster::get_any_binding,
2385 would have returned UNKNOWN, and we would already have returned
2386 that above). */
2387
2388 /* Handle globals. */
2389 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2390 == RK_GLOBALS)
2391 return get_initial_value_for_global (reg);
2392
2393 return m_mgr->get_or_create_initial_value (reg);
2394}
2395
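/* Illustrative sketch (editorial addition, not part of the original source):
   the STRING_CST special case above means that for analyzed C code
   such as

     const char *s = "foo";
     char c = s[1];

   a read at constant offset 1 within the string_region for "foo" can
   be resolved to the constant 'o' rather than a symbolic value.  */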
2396/* Return false if REG does not exist, true if it may do.
2397 This is for detecting regions within the stack that don't exist anymore
2398 after frames are popped. */
2399
2400bool
2401region_model::region_exists_p (const region *reg) const
2402{
2403 /* If within a stack frame, check that the stack frame is live. */
2404 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
2405 {
2406 /* Check that the current frame is the enclosing frame, or is called
2407 by it. */
2408 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2409 iter_frame = iter_frame->get_calling_frame ())
2410 if (iter_frame == enclosing_frame)
2411 return true;
2412 return false;
2413 }
2414
2415 return true;
2416}
2417
2418/* Get a region for referencing PTR_SVAL, creating a region if need be, and
2419 potentially generating warnings via CTXT.
2420 PTR_SVAL must be of pointer type.
2421 PTR_TREE if non-NULL can be used when emitting diagnostics. */
2422
2423const region *
2424region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
2425 region_model_context *ctxt) const
2426{
2427 gcc_assert (ptr_sval);
2428 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
2429
2430 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2431 as a constraint. This suppresses false positives from
2432 -Wanalyzer-null-dereference for the case where we later have an
2433 if (PTR_SVAL) that would occur if we considered the false branch
2434 and transitioned the malloc state machine from start->null. */
2435 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2436 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2437 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2438
808f4dfe 2439 switch (ptr_sval->get_kind ())
757bf1df 2440 {
808f4dfe 2441 default:
23ebfda0 2442 break;
808f4dfe 2443
757bf1df
DM
2444 case SK_REGION:
2445 {
2446 const region_svalue *region_sval
2447 = as_a <const region_svalue *> (ptr_sval);
2448 return region_sval->get_pointee ();
2449 }
2450
2451 case SK_BINOP:
2452 {
2453 const binop_svalue *binop_sval
2454 = as_a <const binop_svalue *> (ptr_sval);
2455 switch (binop_sval->get_op ())
2456 {
2457 case POINTER_PLUS_EXPR:
2458 {
2459 /* If we have a symbolic value expressing pointer arithmetic,
2460 try to convert it to a suitable region. */
2461 const region *parent_region
2462 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2463 const svalue *offset = binop_sval->get_arg1 ();
2464 tree type = TREE_TYPE (ptr_sval->get_type ());
2465 return m_mgr->get_offset_region (parent_region, type, offset);
2466 }
2467 default:
2468 break;
2469 }
2470 }
2471 break;
2472
2473 case SK_POISONED:
2474 {
2475 if (ctxt)
2476 {
2477 tree ptr = get_representative_tree (ptr_sval);
2478 /* If we can't get a representative tree for PTR_SVAL
2479 (e.g. if it hasn't been bound into the store), then
2480 fall back on PTR_TREE, if non-NULL. */
2481 if (!ptr)
2482 ptr = ptr_tree;
2483 if (ptr)
2484 {
2485 const poisoned_svalue *poisoned_sval
2486 = as_a <const poisoned_svalue *> (ptr_sval);
2487 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
2488 ctxt->warn (make_unique<poisoned_value_diagnostic>
2489 (ptr, pkind, NULL));
2490 }
2491 }
2492 }
2493 break;
2494 }
2495
2496 return m_mgr->get_symbolic_region (ptr_sval);
2497}
2498
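/* Illustrative sketch (editorial addition, not part of the original source):
   dereferencing the symbolic pointer arithmetic "p + i" (an SK_BINOP
   with POINTER_PLUS_EXPR) is handled above by recursively
   dereferencing "p" and wrapping the result in an offset_region, so
   that e.g. "*(p + i) = 0" writes to offset i within the region that
   "p" points to.  */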
2499/* Attempt to get BITS within any value of REG, as TYPE.
2500 In particular, extract values from compound_svalues for the case
2501 where there's a concrete binding at BITS.
2502 Return an unknown svalue if we can't handle the given case.
2503 Use CTXT to report any warnings associated with reading from REG. */
2504
2505const svalue *
2506region_model::get_rvalue_for_bits (tree type,
2507 const region *reg,
2508 const bit_range &bits,
2509 region_model_context *ctxt) const
2510{
2511 const svalue *sval = get_store_value (reg, ctxt);
2512 return m_mgr->get_or_create_bits_within (type, bits, sval);
2513}
2514
2515/* A subclass of pending_diagnostic for complaining about writes to
2516 constant regions of memory. */
2517
2518class write_to_const_diagnostic
2519: public pending_diagnostic_subclass<write_to_const_diagnostic>
2520{
2521public:
2522 write_to_const_diagnostic (const region *reg, tree decl)
2523 : m_reg (reg), m_decl (decl)
2524 {}
2525
2526 const char *get_kind () const final override
2527 {
2528 return "write_to_const_diagnostic";
2529 }
2530
2531 bool operator== (const write_to_const_diagnostic &other) const
2532 {
2533 return (m_reg == other.m_reg
2534 && m_decl == other.m_decl);
2535 }
2536
2537 int get_controlling_option () const final override
2538 {
2539 return OPT_Wanalyzer_write_to_const;
2540 }
2541
2542 bool emit (rich_location *rich_loc) final override
2543 {
2544 auto_diagnostic_group d;
2545 bool warned;
2546 switch (m_reg->get_kind ())
2547 {
2548 default:
2549 warned = warning_at (rich_loc, get_controlling_option (),
2550 "write to %<const%> object %qE", m_decl);
2551 break;
2552 case RK_FUNCTION:
2553 warned = warning_at (rich_loc, get_controlling_option (),
2554 "write to function %qE", m_decl);
2555 break;
2556 case RK_LABEL:
2557 warned = warning_at (rich_loc, get_controlling_option (),
2558 "write to label %qE", m_decl);
2559 break;
2560 }
2561 if (warned)
2562 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2563 return warned;
2564 }
2565
2566 label_text describe_final_event (const evdesc::final_event &ev) final override
2567 {
2568 switch (m_reg->get_kind ())
2569 {
2570 default:
2571 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2572 case RK_FUNCTION:
2573 return ev.formatted_print ("write to function %qE here", m_decl);
2574 case RK_LABEL:
2575 return ev.formatted_print ("write to label %qE here", m_decl);
2576 }
2577 }
2578
2579private:
2580 const region *m_reg;
2581 tree m_decl;
2582};
2583
2584/* A subclass of pending_diagnostic for complaining about writes to
2585 string literals. */
2586
2587class write_to_string_literal_diagnostic
2588: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2589{
2590public:
2591 write_to_string_literal_diagnostic (const region *reg)
2592 : m_reg (reg)
2593 {}
2594
2595 const char *get_kind () const final override
2596 {
2597 return "write_to_string_literal_diagnostic";
2598 }
2599
2600 bool operator== (const write_to_string_literal_diagnostic &other) const
2601 {
2602 return m_reg == other.m_reg;
2603 }
2604
2605 int get_controlling_option () const final override
2606 {
2607 return OPT_Wanalyzer_write_to_string_literal;
2608 }
2609
2610 bool emit (rich_location *rich_loc) final override
2611 {
2612 return warning_at (rich_loc, get_controlling_option (),
2613 "write to string literal");
2614 /* Ideally we would show the location of the STRING_CST as well,
2615 but it is not available at this point. */
2616 }
2617
ff171cb1 2618 label_text describe_final_event (const evdesc::final_event &ev) final override
3175d40f
DM
2619 {
2620 return ev.formatted_print ("write to string literal here");
2621 }
2622
2623private:
2624 const region *m_reg;
2625};
2626
2627/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */
2628
2629void
2630region_model::check_for_writable_region (const region* dest_reg,
2631 region_model_context *ctxt) const
2632{
2633 /* Fail gracefully if CTXT is NULL. */
2634 if (!ctxt)
2635 return;
2636
2637 const region *base_reg = dest_reg->get_base_region ();
2638 switch (base_reg->get_kind ())
2639 {
2640 default:
2641 break;
2642 case RK_FUNCTION:
2643 {
2644 const function_region *func_reg = as_a <const function_region *> (base_reg);
2645 tree fndecl = func_reg->get_fndecl ();
2646 ctxt->warn (make_unique<write_to_const_diagnostic>
2647 (func_reg, fndecl));
2648 }
2649 break;
2650 case RK_LABEL:
2651 {
2652 const label_region *label_reg = as_a <const label_region *> (base_reg);
2653 tree label = label_reg->get_label ();
2654 ctxt->warn (make_unique<write_to_const_diagnostic>
2655 (label_reg, label));
2656 }
2657 break;
2658 case RK_DECL:
2659 {
2660 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2661 tree decl = decl_reg->get_decl ();
2662 /* Warn about writes to const globals.
2663 Don't warn for writes to const locals, and params in particular,
2664 since we would warn in push_frame when setting them up (e.g the
2665 "this" param is "T* const"). */
2666 if (TREE_READONLY (decl)
2667 && is_global_var (decl))
2668 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
2669 }
2670 break;
2671 case RK_STRING:
2672 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
2673 break;
2674 }
2675}
2676
2677/* Get the capacity of REG in bytes. */
2678
2679const svalue *
2680region_model::get_capacity (const region *reg) const
2681{
2682 switch (reg->get_kind ())
2683 {
2684 default:
2685 break;
2686 case RK_DECL:
2687 {
2688 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2689 tree decl = decl_reg->get_decl ();
2690 if (TREE_CODE (decl) == SSA_NAME)
2691 {
2692 tree type = TREE_TYPE (decl);
2693 tree size = TYPE_SIZE (type);
2694 return get_rvalue (size, NULL);
2695 }
2696 else
2697 {
2698 tree size = decl_init_size (decl, false);
2699 if (size)
2700 return get_rvalue (size, NULL);
2701 }
2702 }
2703 break;
2704 case RK_SIZED:
2705 /* Look through sized regions to get at the capacity
2706 of the underlying regions. */
2707 return get_capacity (reg->get_parent_region ());
2708 }
2709
2710 if (const svalue *recorded = get_dynamic_extents (reg))
2711 return recorded;
2712
2713 return m_mgr->get_or_create_unknown_svalue (sizetype);
2714}
2715
2716/* Return the string size, including the 0-terminator, if SVAL is a
2717 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
2718
2719const svalue *
2720region_model::get_string_size (const svalue *sval) const
2721{
2722 tree cst = sval->maybe_get_constant ();
2723 if (!cst || TREE_CODE (cst) != STRING_CST)
2724 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2725
2726 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2727 return m_mgr->get_or_create_constant_svalue (out);
2728}
2729
2730/* Return the string size, including the 0-terminator, if REG is a
2731 string_region. Otherwise, return an unknown_svalue. */
2732
2733const svalue *
2734region_model::get_string_size (const region *reg) const
2735{
2736 const string_region *str_reg = dyn_cast <const string_region *> (reg);
2737 if (!str_reg)
2738 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2739
2740 tree cst = str_reg->get_string_cst ();
2741 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2742 return m_mgr->get_or_create_constant_svalue (out);
2743}
2744
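/* Illustrative sketch (editorial addition, not part of the original source):
   for the string_region of the literal "foo", TREE_STRING_LENGTH is 4
   (the three characters plus the terminating NUL), so both overloads
   above would return a constant_svalue of 4 for it.  */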
2745/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2746 using DIR to determine if this access is a read or write. */
2747
2748void
2749region_model::check_region_access (const region *reg,
2750 enum access_direction dir,
2751 region_model_context *ctxt) const
2752{
2753 /* Fail gracefully if CTXT is NULL. */
2754 if (!ctxt)
2755 return;
2756
2757 check_region_for_taint (reg, dir, ctxt);
2758 check_region_bounds (reg, dir, ctxt);
2759
2760 switch (dir)
2761 {
2762 default:
2763 gcc_unreachable ();
2764 case DIR_READ:
2765 /* Currently a no-op. */
2766 break;
2767 case DIR_WRITE:
2768 check_for_writable_region (reg, ctxt);
2769 break;
2770 }
2771}
2772
2773/* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2774
2775void
2776region_model::check_region_for_write (const region *dest_reg,
2777 region_model_context *ctxt) const
2778{
2779 check_region_access (dest_reg, DIR_WRITE, ctxt);
2780}
2781
2782/* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2783
2784void
2785region_model::check_region_for_read (const region *src_reg,
2786 region_model_context *ctxt) const
2787{
2788 check_region_access (src_reg, DIR_READ, ctxt);
2789}
2790
2791/* Concrete subclass for casts of pointers that lead to trailing bytes. */
2792
2793class dubious_allocation_size
2794: public pending_diagnostic_subclass<dubious_allocation_size>
2795{
2796public:
2797 dubious_allocation_size (const region *lhs, const region *rhs)
2798 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
2799 m_has_allocation_event (false)
2800 {}
2801
2802 dubious_allocation_size (const region *lhs, const region *rhs,
2803 tree expr)
2804 : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
2805 m_has_allocation_event (false)
2806 {}
2807
2808 const char *get_kind () const final override
2809 {
2810 return "dubious_allocation_size";
2811 }
2812
2813 bool operator== (const dubious_allocation_size &other) const
2814 {
2815 return m_lhs == other.m_lhs && m_rhs == other.m_rhs
2816 && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
2817 }
2818
2819 int get_controlling_option () const final override
2820 {
2821 return OPT_Wanalyzer_allocation_size;
2822 }
2823
2824 bool emit (rich_location *rich_loc) final override
2825 {
2826 diagnostic_metadata m;
2827 m.add_cwe (131);
2828
2829 return warning_meta (rich_loc, m, get_controlling_option (),
2830 "allocated buffer size is not a multiple"
2831 " of the pointee's size");
2832 }
2833
2834 label_text describe_final_event (const evdesc::final_event &ev) final
2835 override
2836 {
2837 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
2838 if (m_has_allocation_event)
e6c3bb37
TL
2839 return ev.formatted_print ("assigned to %qT here;"
2840 " %<sizeof (%T)%> is %qE",
2841 m_lhs->get_type (), pointee_type,
2842 size_in_bytes (pointee_type));
2843 /* Fallback: Typically, we should always see an allocation_event
2844 before. */
2845 if (m_expr)
2846 {
2847 if (TREE_CODE (m_expr) == INTEGER_CST)
2848 return ev.formatted_print ("allocated %E bytes and assigned to"
2849 " %qT here; %<sizeof (%T)%> is %qE",
2850 m_expr, m_lhs->get_type (), pointee_type,
2851 size_in_bytes (pointee_type));
2852 else
2853 return ev.formatted_print ("allocated %qE bytes and assigned to"
2854 " %qT here; %<sizeof (%T)%> is %qE",
2855 m_expr, m_lhs->get_type (), pointee_type,
2856 size_in_bytes (pointee_type));
2857 }
2858
2859 return ev.formatted_print ("allocated and assigned to %qT here;"
2860 " %<sizeof (%T)%> is %qE",
2861 m_lhs->get_type (), pointee_type,
2862 size_in_bytes (pointee_type));
2863 }
2864
2865 void
2866 add_region_creation_events (const region *,
2867 tree capacity,
2868 const event_loc_info &loc_info,
2869 checker_path &emission_path) final override
2870 {
2871 emission_path.add_event
2872 (make_unique<region_creation_event_allocation_size> (capacity, loc_info));
2873
2874 m_has_allocation_event = true;
2875 }
2876
2877 void mark_interesting_stuff (interesting_t *interest) final override
2878 {
2879 interest->add_region_creation (m_rhs);
2880 }
2881
2882private:
2883 const region *m_lhs;
2884 const region *m_rhs;
2885 const tree m_expr;
2886 bool m_has_allocation_event;
2887};
2888
2889/* Return true on dubious allocation sizes for constant sizes. */
2890
2891static bool
2892capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2893 bool is_struct)
2894{
2895 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2896 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
2897
2898 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
2899 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
2900
2901 if (is_struct)
2902 return alloc_size == 0 || alloc_size >= pointee_size;
2903 return alloc_size % pointee_size == 0;
2904}
2905
2906static bool
2907capacity_compatible_with_type (tree cst, tree pointee_size_tree)
2908{
2909 return capacity_compatible_with_type (cst, pointee_size_tree, false);
2910}
2911
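/* Illustrative sketch (editorial addition, not part of the original source):
   with a pointee size of 4 (e.g. a 32-bit int):

     capacity_compatible_with_type (12, 4, false)  => true  (12 % 4 == 0)
     capacity_compatible_with_type (10, 4, false)  => false (trailing bytes)
     capacity_compatible_with_type ( 2, 4, true)   => false (too small
                                                     for the struct)

   (arguments shown as plain numbers for brevity; the real parameters
   are INTEGER_CST trees).  */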
2912/* Checks whether SVAL could be a multiple of SIZE_CST.
2913
2914 It works by visiting all svalues inside SVAL until it reaches
2915 atomic nodes. From those, it goes back up again and adds each
2916 node that might be a multiple of SIZE_CST to the RESULT_SET. */
2917
2918class size_visitor : public visitor
2919{
2920public:
2921 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
2922 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
2923 {
2924 m_root_sval->accept (this);
2925 }
2926
2927 bool get_result ()
2928 {
2929 return result_set.contains (m_root_sval);
2930 }
2931
2932 void visit_constant_svalue (const constant_svalue *sval) final override
2933 {
2934 check_constant (sval->get_constant (), sval);
2935 }
2936
2937 void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
2938 final override
2939 {
2940 result_set.add (sval);
2941 }
2942
2943 void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
2944 final override
2945 {
2946 result_set.add (sval);
2947 }
2948
2949 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
2950 {
2951 const svalue *arg = sval->get_arg ();
2952 if (result_set.contains (arg))
2953 result_set.add (sval);
2954 }
2955
2956 void visit_binop_svalue (const binop_svalue *sval) final override
2957 {
2958 const svalue *arg0 = sval->get_arg0 ();
2959 const svalue *arg1 = sval->get_arg1 ();
2960
2961 if (sval->get_op () == MULT_EXPR)
2962 {
2963 if (result_set.contains (arg0) || result_set.contains (arg1))
2964 result_set.add (sval);
2965 }
2966 else
2967 {
2968 if (result_set.contains (arg0) && result_set.contains (arg1))
2969 result_set.add (sval);
2970 }
2971 }
2972
2973 void visit_repeated_svalue (const repeated_svalue *sval) final override
2974 {
2975 sval->get_inner_svalue ()->accept (this);
2976 if (result_set.contains (sval->get_inner_svalue ()))
2977 result_set.add (sval);
2978 }
2979
2980 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
2981 {
2982 sval->get_arg ()->accept (this);
2983 if (result_set.contains (sval->get_arg ()))
2984 result_set.add (sval);
2985 }
2986
2987 void visit_widening_svalue (const widening_svalue *sval) final override
2988 {
2989 const svalue *base = sval->get_base_svalue ();
2990 const svalue *iter = sval->get_iter_svalue ();
2991
2992 if (result_set.contains (base) && result_set.contains (iter))
2993 result_set.add (sval);
2994 }
2995
2996 void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
2997 final override
2998 {
2999 equiv_class_id id (-1);
3000 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3001 {
3002 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3003 check_constant (cst, sval);
3004 else
3005 result_set.add (sval);
3006 }
3007 }
3008
3009 void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
3010 final override
3011 {
3012 result_set.add (sval);
3013 }
3014
3015 void visit_const_fn_result_svalue (const const_fn_result_svalue
3016 *sval ATTRIBUTE_UNUSED) final override
3017 {
3018 result_set.add (sval);
3019 }
3020
3021private:
3022 void check_constant (tree cst, const svalue *sval)
3023 {
3024 switch (TREE_CODE (cst))
3025 {
3026 default:
3027 /* Assume all unhandled operands are compatible. */
3028 result_set.add (sval);
3029 break;
3030 case INTEGER_CST:
3031 if (capacity_compatible_with_type (cst, m_size_cst))
3032 result_set.add (sval);
3033 break;
3034 }
3035 }
3036
3037 tree m_size_cst;
3038 const svalue *m_root_sval;
3039 constraint_manager *m_cm;
3040 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
3041};
3042
3043/* Return true if a struct or union either uses the inheritance pattern,
3044 where the first field is a base struct, or the flexible array member
3045 pattern, where the last field is an array without a specified size. */
3046
3047static bool
3048struct_or_union_with_inheritance_p (tree struc)
3049{
3050 tree iter = TYPE_FIELDS (struc);
3051 if (iter == NULL_TREE)
3052 return false;
3053 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3054 return true;
3055
3056 tree last_field;
3057 while (iter != NULL_TREE)
3058 {
3059 last_field = iter;
3060 iter = DECL_CHAIN (iter);
3061 }
3062
3063 if (last_field != NULL_TREE
3064 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3065 return true;
3066
3067 return false;
3068}
3069
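/* Illustrative sketch (editorial addition, not part of the original source):
   both of these analyzed C types make the predicate above return true:

     struct derived { struct base b; int extra; };   // base-first
     struct str { size_t len; char data[]; };        // flexible array

   for such types an allocation that is not a multiple of (or is
   smaller than) sizeof the type can still be deliberate, so the
   allocation-size check bails out.  */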
3070/* Return true if the lhs and rhs of an assignment have different types. */
3071
3072static bool
3073is_any_cast_p (const gimple *stmt)
3074{
3075 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
3076 return gimple_assign_cast_p (assign)
3077 || !pending_diagnostic::same_tree_p (
3078 TREE_TYPE (gimple_assign_lhs (assign)),
3079 TREE_TYPE (gimple_assign_rhs1 (assign)));
3080 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
3081 {
3082 tree lhs = gimple_call_lhs (call);
3083 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3084 TREE_TYPE (gimple_call_lhs (call)),
3085 gimple_call_return_type (call));
3086 }
3087
3088 return false;
3089}
3090
3091/* On pointer assignments, check whether the buffer size of
3092 RHS_SVAL is compatible with the type of the LHS_REG.
3093 Use a non-null CTXT to report allocation size warnings. */
3094
3095void
3096region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
3097 region_model_context *ctxt) const
3098{
3099 if (!ctxt || ctxt->get_stmt () == NULL)
3100 return;
3101 /* Only report warnings on assignments that actually change the type. */
3102 if (!is_any_cast_p (ctxt->get_stmt ()))
3103 return;
3104
3105 const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
3106 if (!reg_sval)
3107 return;
3108
3109 tree pointer_type = lhs_reg->get_type ();
3110 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
3111 return;
3112
3113 tree pointee_type = TREE_TYPE (pointer_type);
3114 /* Make sure that the type on the left-hand side actually has a size. */
3115 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
3116 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
3117 return;
3118
3119 /* Bail out early on pointers to structs where we can
3120 not deduce whether the buffer size is compatible. */
3121 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
3122 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
3123 return;
3124
3125 tree pointee_size_tree = size_in_bytes (pointee_type);
3126 /* We give up if the type size is not known at compile-time or the
3127 type size is always compatible regardless of the buffer size. */
3128 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
3129 || integer_zerop (pointee_size_tree)
3130 || integer_onep (pointee_size_tree))
3131 return;
3132
3133 const region *rhs_reg = reg_sval->get_pointee ();
3134 const svalue *capacity = get_capacity (rhs_reg);
3135 switch (capacity->get_kind ())
3136 {
3137 case svalue_kind::SK_CONSTANT:
3138 {
3139 const constant_svalue *cst_cap_sval
3140 = as_a <const constant_svalue *> (capacity);
3141 tree cst_cap = cst_cap_sval->get_constant ();
3142 if (TREE_CODE (cst_cap) == INTEGER_CST
3143 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
3144 is_struct))
3145 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
3146 cst_cap));
3147 }
3148 break;
3149 default:
3150 {
3151 if (!is_struct)
3152 {
3153 size_visitor v (pointee_size_tree, capacity, m_constraints);
3154 if (!v.get_result ())
3155 {
3156 tree expr = get_representative_tree (capacity);
3157 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
3158 rhs_reg,
3159 expr));
3160 }
3161 }
3162 break;
3163 }
3164 }
3165}
3166
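/* Illustrative sketch (editorial addition, not part of the original source):
   the check above is what fires -Wanalyzer-allocation-size on analyzed
   C code such as

     int *ptr = malloc (10);   // 10 is not a multiple of sizeof (int)

   here the cast from void * to int * makes is_any_cast_p true, the
   capacity of the heap-allocated region is the constant 10, and
   capacity_compatible_with_type rejects it for a 4-byte pointee.  */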
3167/* Set the value of the region given by LHS_REG to the value given
3168 by RHS_SVAL.
3169 Use CTXT to report any warnings associated with writing to LHS_REG. */
3170
3171void
3172region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3173 region_model_context *ctxt)
3174{
3175 gcc_assert (lhs_reg);
3176 gcc_assert (rhs_sval);
3177
3178 /* Setting the value of an empty region is a no-op. */
3179 if (lhs_reg->empty_p ())
3180 return;
3181
3182 check_region_size (lhs_reg, rhs_sval, ctxt);
3183
3184 check_region_for_write (lhs_reg, ctxt);
3185
3186 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
3187 ctxt ? ctxt->get_uncertainty () : NULL);
3188}
3189
3190/* Set the value of the region given by LHS to the value given by RHS. */
3191
3192void
3193region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
3194{
3195 const region *lhs_reg = get_lvalue (lhs, ctxt);
3196 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3197 gcc_assert (lhs_reg);
3198 gcc_assert (rhs_sval);
3199 set_value (lhs_reg, rhs_sval, ctxt);
3200}
3201
3202/* Remove all bindings overlapping REG within the store. */
3203
3204void
3205region_model::clobber_region (const region *reg)
3206{
3207 m_store.clobber_region (m_mgr->get_store_manager(), reg);
3208}
3209
3210/* Remove any bindings for REG within the store. */
3211
3212void
3213region_model::purge_region (const region *reg)
3214{
3215 m_store.purge_region (m_mgr->get_store_manager(), reg);
3216}
3217
3218/* Fill REG with SVAL. */
3219
3220void
3221region_model::fill_region (const region *reg, const svalue *sval)
3222{
3223 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
3224}
3225
3226/* Zero-fill REG. */
3227
3228void
3229region_model::zero_fill_region (const region *reg)
3230{
3231 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
3232}
3233
3234/* Mark REG as having unknown content. */
3235
3236void
3237region_model::mark_region_as_unknown (const region *reg,
3238 uncertainty_t *uncertainty)
3239{
3240 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
3241 uncertainty);
3242}
3243
3244/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
3245 this model. */
3246
3247tristate
3248region_model::eval_condition (const svalue *lhs,
3249 enum tree_code op,
3250 const svalue *rhs) const
3251{
3252 gcc_assert (lhs);
3253 gcc_assert (rhs);
3254
3255 /* For now, make no attempt to capture constraints on floating-point
3256 values. */
3257 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
3258 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
3259 return tristate::unknown ();
3260
3261 /* See what we know based on the values. */
3262
3263 /* Unwrap any unmergeable values. */
3264 lhs = lhs->unwrap_any_unmergeable ();
3265 rhs = rhs->unwrap_any_unmergeable ();
3266
3267 if (lhs == rhs)
757bf1df 3268 {
3269 /* If we have the same svalue, then we have equality
3270 (apart from NaN-handling).
3271 TODO: should this definitely be the case for poisoned values? */
3272 /* Poisoned and unknown values are "unknowable". */
3273 if (lhs->get_kind () == SK_POISONED
3274 || lhs->get_kind () == SK_UNKNOWN)
3275 return tristate::TS_UNKNOWN;
3276
3277 switch (op)
3278 {
3279 case EQ_EXPR:
3280 case GE_EXPR:
3281 case LE_EXPR:
3282 return tristate::TS_TRUE;
3283
3284 case NE_EXPR:
3285 case GT_EXPR:
3286 case LT_EXPR:
3287 return tristate::TS_FALSE;
3288
3289 default:
3290 /* For other ops, use the logic below. */
3291 break;
3292 }
3293 }
3294
3295 /* If we have a pair of region_svalues, compare them. */
3296 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3297 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3298 {
3299 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
3300 if (res.is_known ())
3301 return res;
3302 /* Otherwise, only known through constraints. */
3303 }
3304
3305 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
3306 {
3307 /* If we have a pair of constants, compare them. */
3308 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3309 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3310 else
3311 {
3312 /* When we have one constant, put it on the RHS. */
3313 std::swap (lhs, rhs);
3314 op = swap_tree_comparison (op);
3315 }
3316 }
3317 gcc_assert (lhs->get_kind () != SK_CONSTANT);
3318
3319 /* Handle comparison against zero. */
3320 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3321 if (zerop (cst_rhs->get_constant ()))
3322 {
3323 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3324 {
3325 /* A region_svalue is a non-NULL pointer, except in certain
3326 special cases (see the comment for region::non_null_p). */
3327 const region *pointee = ptr->get_pointee ();
3328 if (pointee->non_null_p ())
3329 {
3330 switch (op)
3331 {
3332 default:
3333 gcc_unreachable ();
3334
3335 case EQ_EXPR:
3336 case GE_EXPR:
3337 case LE_EXPR:
3338 return tristate::TS_FALSE;
3339
3340 case NE_EXPR:
3341 case GT_EXPR:
3342 case LT_EXPR:
3343 return tristate::TS_TRUE;
3344 }
3345 }
3346 }
3347 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3348 {
3349 /* Treat offsets from a non-NULL pointer as being non-NULL. This
3350 isn't strictly true, in that eventually ptr++ will wrap
3351 around and be NULL, but it won't occur in practise and thus
3352 can be used to suppress effectively false positives that we
3353 shouldn't warn for. */
3354 if (binop->get_op () == POINTER_PLUS_EXPR)
3355 {
3356 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
3357 if (lhs_ts.is_known ())
3358 return lhs_ts;
3359 }
3360 }
3361 else if (const unaryop_svalue *unaryop
3362 = lhs->dyn_cast_unaryop_svalue ())
3363 {
3364 if (unaryop->get_op () == NEGATE_EXPR)
3365 {
3366 /* e.g. "-X <= 0" is equivalent to X >= 0". */
3367 tristate lhs_ts = eval_condition (unaryop->get_arg (),
3368 swap_tree_comparison (op),
3369 rhs);
3370 if (lhs_ts.is_known ())
3371 return lhs_ts;
3372 }
3373 }
3374 }
3375
3376 /* Handle rejection of equality for comparisons of the initial values of
3377 "external" values (such as params) with the address of locals. */
3378 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3379 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3380 {
3381 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3382 if (res.is_known ())
3383 return res;
3384 }
3385 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3386 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3387 {
3388 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3389 if (res.is_known ())
3390 return res;
3391 }
3392
3393 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3394 if (tree rhs_cst = rhs->maybe_get_constant ())
3395 {
3396 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3397 if (res.is_known ())
3398 return res;
3399 }
3400
7a6564c9 3401 /* Handle comparisons between two svalues with more than one operand. */
9bbcee45 3402 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
7a6564c9
TL
3403 {
3404 switch (op)
3405 {
3406 default:
3407 break;
3408 case EQ_EXPR:
3409 {
3410 /* TODO: binops can be equal even if they are not structurally
3411 equal in case of commutative operators. */
3412 tristate res = structural_equality (lhs, rhs);
3413 if (res.is_true ())
3414 return res;
3415 }
3416 break;
3417 case LE_EXPR:
3418 {
3419 tristate res = structural_equality (lhs, rhs);
3420 if (res.is_true ())
3421 return res;
3422 }
3423 break;
3424 case GE_EXPR:
3425 {
3426 tristate res = structural_equality (lhs, rhs);
3427 if (res.is_true ())
3428 return res;
3429 res = symbolic_greater_than (binop, rhs);
3430 if (res.is_true ())
3431 return res;
3432 }
3433 break;
3434 case GT_EXPR:
3435 {
3436 tristate res = symbolic_greater_than (binop, rhs);
3437 if (res.is_true ())
3438 return res;
3439 }
3440 break;
3441 }
3442 }
3443
9bbcee45
DM
3444 /* Otherwise, try constraints.
3445 Cast to const to ensure we don't change the constraint_manager as we
3446 do this (e.g. by creating equivalence classes). */
3447 const constraint_manager *constraints = m_constraints;
3448 return constraints->eval_condition (lhs, op, rhs);
808f4dfe
DM
3449}
3450
9bbcee45 3451/* Subroutine of region_model::eval_condition, for rejecting
808f4dfe
DM
3452 equality of INIT_VAL(PARM) with &LOCAL. */
3453
3454tristate
3455region_model::compare_initial_and_pointer (const initial_svalue *init,
3456 const region_svalue *ptr) const
3457{
3458 const region *pointee = ptr->get_pointee ();
3459
3460 /* If we have a pointer to something within a stack frame, it can't be the
3461 initial value of a param. */
3462 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
3463 if (init->initial_value_of_param_p ())
3464 return tristate::TS_FALSE;
757bf1df
DM
3465
3466 return tristate::TS_UNKNOWN;
3467}
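/* e.g. (an assumed illustration): when analyzing
     void callee (int *p)
     {
       int local;
       ...
     }
   a test of "p == &local" can be rejected as false: the initial value
   of param "p" came from the caller, which cannot have taken the
   address of a local in the callee's frame.  */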
3468
7a6564c9
TL
3469/* Return true if SVAL is definitely positive. */
3470
3471static bool
3472is_positive_svalue (const svalue *sval)
3473{
3474 if (tree cst = sval->maybe_get_constant ())
3475 return !zerop (cst) && get_range_pos_neg (cst) == 1;
3476 tree type = sval->get_type ();
3477 if (!type)
3478 return false;
3479 /* Consider a binary operation size_t + int. The analyzer wraps the int in
3480 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
3481 a negative int makes the result smaller than the first operand. Thus, we
3482 have to check whether the argument of the unaryop_svalue is also positive. */
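  /* A hedged sketch of that situation:
       size_t n = ...;
       int delta = ...;   /- possibly negative -/
       ... n + delta ...
     "delta" is modeled as (size_t)delta: an unaryop_svalue whose type
     is unsigned, so it is only treated as positive here if "delta"
     itself is known to be positive.  */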
3483 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
3484 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
3485 && is_positive_svalue (un_op->get_arg ());
3486 return TYPE_UNSIGNED (type);
3487}
3488
3489/* Return TS_TRUE if A is definitely larger than B.
3490
3491 Limitation: does not account for integer overflows and does not try to
3492 return false, so it cannot be used negated. */
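/* For instance (an illustrative sketch): with both sides PLUS_EXPR
   binops, "i + 4 > i + 2" yields TS_TRUE, since 4 > 2 and i >= i;
   likewise "n + 16 > n" holds, since 16 is positive and n >= n.  */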
3493
3494tristate
3495region_model::symbolic_greater_than (const binop_svalue *bin_a,
3496 const svalue *b) const
3497{
3498 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
3499 {
3500 /* Eliminate the right-hand side of both svalues. */
3501 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3502 if (bin_a->get_op () == bin_b->get_op ()
9bbcee45
DM
3503 && eval_condition (bin_a->get_arg1 (),
3504 GT_EXPR,
3505 bin_b->get_arg1 ()).is_true ()
3506 && eval_condition (bin_a->get_arg0 (),
3507 GE_EXPR,
3508 bin_b->get_arg0 ()).is_true ())
7a6564c9
TL
3509 return tristate (tristate::TS_TRUE);
3510
3511 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
3512 if (is_positive_svalue (bin_a->get_arg1 ())
9bbcee45
DM
3513 && eval_condition (bin_a->get_arg0 (),
3514 GE_EXPR, b).is_true ())
7a6564c9
TL
3515 return tristate (tristate::TS_TRUE);
3516 }
3517 return tristate::unknown ();
3518}
3519
3520/* Return true if A and B are equal structurally.
3521
3522 Structural equality means that A and B are equal if the svalues A and B have
3523 the same nodes at the same positions in the tree and the leaves are equal.
3524 Equality for conjured_svalues and initial_svalues is determined by comparing
3525 the pointers while constants are compared by value. That behavior is useful
3526 to check for binop_svalues that evaluate to the same concrete value but
3527 might use one operand with a different type but the same constant value.
3528
3529 For example,
3530 binop_svalue (mult_expr,
3531 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
3532 constant_svalue (‘size_t’, 4))
3533 and
3534 binop_svalue (mult_expr,
3535 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
3536 constant_svalue (‘sizetype’, 4))
3537 are structurally equal. A concrete C code example where this occurs can
3538 be found in test7 of out-of-bounds-5.c. */
3539
3540tristate
3541region_model::structural_equality (const svalue *a, const svalue *b) const
3542{
3543 /* If A and B are referentially equal, they are also structurally equal. */
3544 if (a == b)
3545 return tristate (tristate::TS_TRUE);
3546
3547 switch (a->get_kind ())
3548 {
3549 default:
3550 return tristate::unknown ();
3551 /* SK_CONJURED and SK_INITIAL are already handled
3552 by the referential equality above. */
3553 case SK_CONSTANT:
3554 {
3555 tree a_cst = a->maybe_get_constant ();
3556 tree b_cst = b->maybe_get_constant ();
3557 if (a_cst && b_cst)
3558 return tristate (tree_int_cst_equal (a_cst, b_cst));
3559 }
3560 return tristate (tristate::TS_FALSE);
3561 case SK_UNARYOP:
3562 {
3563 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
3564 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
3565 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
3566 un_b->get_type ())
3567 && un_a->get_op () == un_b->get_op ()
3568 && structural_equality (un_a->get_arg (),
3569 un_b->get_arg ()));
3570 }
3571 return tristate (tristate::TS_FALSE);
3572 case SK_BINOP:
3573 {
3574 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
3575 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3576 return tristate (bin_a->get_op () == bin_b->get_op ()
3577 && structural_equality (bin_a->get_arg0 (),
3578 bin_b->get_arg0 ())
3579 && structural_equality (bin_a->get_arg1 (),
3580 bin_b->get_arg1 ()));
3581 }
3582 return tristate (tristate::TS_FALSE);
3583 }
3584}
3585
48e8a7a6
DM
3586/* Handle various constraints of the form:
3587 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
3588 OP : == or !=
3589 RHS: zero
3590 and (with a cast):
3591 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
3592 OP : == or !=
3593 RHS: zero
3594 by adding constraints for INNER_LHS INNER_OP INNER_RHS.
3595
3596 Return true if this function can fully handle the constraint; if
3597 so, add the implied constraint(s) and write true to *OUT if they
3598 are consistent with existing constraints, or write false to *OUT
3599 if they contradict existing constraints.
3600
3601 Return false for cases that this function doesn't know how to handle.
3602
3603 For example, if we're checking a stored conditional, we'll have
3604 something like:
3605 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
3606 OP : NE_EXPR
3607 RHS: zero
3608 which this function can turn into an add_constraint of:
3609 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
3610
3611 Similarly, optimized && and || conditionals lead to e.g.
3612 if (p && q)
3613 becoming gimple like this:
3614 _1 = p_6 == 0B;
3615 _2 = q_8 == 0B;
3616 _3 = _1 | _2;
3617 On the "_3 is false" branch we can have constraints of the form:
3618 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3619 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
3620 == 0
3621 which implies that both _1 and _2 are false,
3622 which this function can turn into a pair of add_constraints of
3623 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
3624 and:
3625 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
3626
3627bool
3628region_model::add_constraints_from_binop (const svalue *outer_lhs,
3629 enum tree_code outer_op,
3630 const svalue *outer_rhs,
3631 bool *out,
3632 region_model_context *ctxt)
3633{
3634 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
3635 outer_lhs = cast;
3636 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
3637 if (!binop_sval)
3638 return false;
3639 if (!outer_rhs->all_zeroes_p ())
3640 return false;
3641
3642 const svalue *inner_lhs = binop_sval->get_arg0 ();
3643 enum tree_code inner_op = binop_sval->get_op ();
3644 const svalue *inner_rhs = binop_sval->get_arg1 ();
3645
3646 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
3647 return false;
3648
3649 /* We have either
3650 - "OUTER_LHS != false" (i.e. OUTER is true), or
3651 - "OUTER_LHS == false" (i.e. OUTER is false). */
3652 bool is_true = outer_op == NE_EXPR;
3653
3654 switch (inner_op)
3655 {
3656 default:
3657 return false;
3658
3659 case EQ_EXPR:
3660 case NE_EXPR:
3661 {
3662 /* The inner comparison (inner_lhs OP inner_rhs) must have the
3663 same logical value as the outer condition, so if the outer
3664 condition is false, add the inverted inner comparison. */
3665 if (!is_true)
3666 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
3667 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
3668 return true;
3669 }
3670 break;
3671
3672 case BIT_AND_EXPR:
3673 if (is_true)
3674 {
3675 /* ...and "(inner_lhs & inner_rhs) != 0"
3676 then both inner_lhs and inner_rhs must be true. */
3677 const svalue *false_sval
3678 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3679 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
3680 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
3681 *out = sat1 && sat2;
3682 return true;
3683 }
3684 return false;
3685
3686 case BIT_IOR_EXPR:
3687 if (!is_true)
3688 {
3689 /* ...and "(inner_lhs | inner_rhs) == 0"
3690 i.e. "(inner_lhs | inner_rhs)" is false
3691 then both inner_lhs and inner_rhs must be false. */
3692 const svalue *false_sval
3693 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
3694 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
3695 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
3696 *out = sat1 && sat2;
3697 return true;
3698 }
3699 return false;
3700 }
3701}
3702
757bf1df
DM
3703/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3704 If it is consistent with existing constraints, add it, and return true.
3705 Return false if it contradicts existing constraints.
3706 Use CTXT for reporting any diagnostics associated with the accesses. */
3707
3708bool
3709region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3710 region_model_context *ctxt)
3711{
e978955d
DM
3712 /* For now, make no attempt to capture constraints on floating-point
3713 values. */
3714 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3715 return true;
3716
808f4dfe
DM
3717 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3718 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 3719
48e8a7a6
DM
3720 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3721}
3722
3723/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3724 If it is consistent with existing constraints, add it, and return true.
3725 Return false if it contradicts existing constraints.
3726 Use CTXT for reporting any diagnostics associated with the accesses. */
3727
3728bool
3729region_model::add_constraint (const svalue *lhs,
3730 enum tree_code op,
3731 const svalue *rhs,
3732 region_model_context *ctxt)
3733{
3734 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
3735
3736 /* If we already have the condition, do nothing. */
3737 if (t_cond.is_true ())
3738 return true;
3739
3740 /* Reject a constraint that would contradict existing knowledge, as
3741 unsatisfiable. */
3742 if (t_cond.is_false ())
3743 return false;
3744
48e8a7a6
DM
3745 bool out;
3746 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3747 return out;
757bf1df 3748
c4b8f373
DM
3749 /* Attempt to store the constraint. */
3750 if (!m_constraints->add_constraint (lhs, op, rhs))
3751 return false;
757bf1df
DM
3752
3753 /* Notify the context, if any. This exists so that the state machines
3754 in a program_state can be notified about the condition, and so can
3755 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3756 when synthesizing constraints as above. */
3757 if (ctxt)
3758 ctxt->on_condition (lhs, op, rhs);
3759
9a2c9579
DM
3760 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3761 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
3762 if (tree rhs_cst = rhs->maybe_get_constant ())
3763 if (op == EQ_EXPR && zerop (rhs_cst))
3764 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3765 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 3766
757bf1df
DM
3767 return true;
3768}
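/* e.g. (a sketch): along a path that already has the constraint
   "i < 10", re-adding "i < 10" is a no-op returning true, whereas
   attempting to add "i >= 10" returns false, marking the path as
   infeasible.  */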
3769
84fb3546
DM
3770/* As above, but when returning false, if OUT is non-NULL, write a
3771 new rejected_constraint to *OUT. */
3772
3773bool
3774region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3775 region_model_context *ctxt,
3776 rejected_constraint **out)
3777{
3778 bool sat = add_constraint (lhs, op, rhs, ctxt);
3779 if (!sat && out)
8ca7fa84 3780 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
3781 return sat;
3782}
3783
757bf1df
DM
3784/* Determine what is known about the condition "LHS OP RHS" within
3785 this model.
3786 Use CTXT for reporting any diagnostics associated with the accesses. */
3787
3788tristate
3789region_model::eval_condition (tree lhs,
3790 enum tree_code op,
3791 tree rhs,
5c6546ca 3792 region_model_context *ctxt) const
757bf1df 3793{
e978955d
DM
3794 /* For now, make no attempt to model constraints on floating-point
3795 values. */
3796 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3797 return tristate::unknown ();
3798
757bf1df
DM
3799 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3800}
3801
467a4820
DM
3802/* Implementation of region_model::get_representative_path_var.
3803 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
3804 Use VISITED to prevent infinite mutual recursion with the overload for
3805 regions. */
757bf1df 3806
808f4dfe 3807path_var
467a4820
DM
3808region_model::get_representative_path_var_1 (const svalue *sval,
3809 svalue_set *visited) const
757bf1df 3810{
467a4820 3811 gcc_assert (sval);
757bf1df 3812
808f4dfe
DM
3813 /* Prevent infinite recursion. */
3814 if (visited->contains (sval))
3815 return path_var (NULL_TREE, 0);
3816 visited->add (sval);
757bf1df 3817
467a4820
DM
3818 /* Handle casts by recursion into get_representative_path_var. */
3819 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3820 {
3821 path_var result = get_representative_path_var (cast_sval, visited);
3822 tree orig_type = sval->get_type ();
3823 /* If necessary, wrap the result in a cast. */
3824 if (result.m_tree && orig_type)
3825 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3826 return result;
3827 }
3828
808f4dfe
DM
3829 auto_vec<path_var> pvs;
3830 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 3831
808f4dfe
DM
3832 if (tree cst = sval->maybe_get_constant ())
3833 pvs.safe_push (path_var (cst, 0));
757bf1df 3834
90f7c300 3835 /* Handle string literals and various other pointers. */
808f4dfe
DM
3836 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3837 {
3838 const region *reg = ptr_sval->get_pointee ();
3839 if (path_var pv = get_representative_path_var (reg, visited))
3840 return path_var (build1 (ADDR_EXPR,
467a4820 3841 sval->get_type (),
808f4dfe
DM
3842 pv.m_tree),
3843 pv.m_stack_depth);
3844 }
3845
3846 /* If we have a sub_svalue, look for ways to represent the parent. */
3847 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 3848 {
808f4dfe
DM
3849 const svalue *parent_sval = sub_sval->get_parent ();
3850 const region *subreg = sub_sval->get_subregion ();
3851 if (path_var parent_pv
3852 = get_representative_path_var (parent_sval, visited))
3853 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3854 return path_var (build3 (COMPONENT_REF,
3855 sval->get_type (),
3856 parent_pv.m_tree,
3857 field_reg->get_field (),
3858 NULL_TREE),
3859 parent_pv.m_stack_depth);
90f7c300
DM
3860 }
3861
b9365b93
DM
3862 /* Handle binops. */
3863 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3864 if (path_var lhs_pv
3865 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3866 if (path_var rhs_pv
3867 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3868 return path_var (build2 (binop_sval->get_op (),
3869 sval->get_type (),
3870 lhs_pv.m_tree, rhs_pv.m_tree),
3871 lhs_pv.m_stack_depth);
3872
808f4dfe
DM
3873 if (pvs.length () < 1)
3874 return path_var (NULL_TREE, 0);
3875
3876 pvs.qsort (readability_comparator);
3877 return pvs[0];
757bf1df
DM
3878}
3879
467a4820
DM
3880/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3881 Use VISITED to prevent infinite mutual recursion with the overload for
3882 regions.
3883
3884 This function defers to get_representative_path_var_1 to do the work;
3885 it adds verification that get_representative_path_var_1 returned a tree
3886 of the correct type. */
3887
3888path_var
3889region_model::get_representative_path_var (const svalue *sval,
3890 svalue_set *visited) const
3891{
3892 if (sval == NULL)
3893 return path_var (NULL_TREE, 0);
3894
3895 tree orig_type = sval->get_type ();
3896
3897 path_var result = get_representative_path_var_1 (sval, visited);
3898
3899 /* Verify that the result has the same type as SVAL, if any. */
3900 if (result.m_tree && orig_type)
3901 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3902
3903 return result;
3904}
3905
3906/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3907
3908 Strip off any top-level cast, to avoid messages like
3909 double-free of '(void *)ptr'
3910 from analyzer diagnostics. */
757bf1df 3911
808f4dfe
DM
3912tree
3913region_model::get_representative_tree (const svalue *sval) const
757bf1df 3914{
808f4dfe 3915 svalue_set visited;
467a4820
DM
3916 tree expr = get_representative_path_var (sval, &visited).m_tree;
3917
3918 /* Strip off any top-level cast. */
7e3b45be
TL
3919 if (expr && TREE_CODE (expr) == NOP_EXPR)
3920 expr = TREE_OPERAND (expr, 0);
3921
3922 return fixup_tree_for_diagnostic (expr);
3923}
3924
3925tree
3926region_model::get_representative_tree (const region *reg) const
3927{
3928 svalue_set visited;
3929 tree expr = get_representative_path_var (reg, &visited).m_tree;
3930
3931 /* Strip off any top-level cast. */
467a4820 3932 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 3933 expr = TREE_OPERAND (expr, 0);
467a4820 3934
e4bb1bd6 3935 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
3936}
3937
467a4820
DM
3938/* Implementation of region_model::get_representative_path_var.
3939
3940 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
3941 the NULL path_var.
3942 For example, a region for a field of a local would be a path_var
3943 wrapping a COMPONENT_REF.
3944 Use VISITED to prevent infinite mutual recursion with the overload for
3945 svalues. */
757bf1df 3946
808f4dfe 3947path_var
467a4820
DM
3948region_model::get_representative_path_var_1 (const region *reg,
3949 svalue_set *visited) const
808f4dfe
DM
3950{
3951 switch (reg->get_kind ())
757bf1df 3952 {
808f4dfe
DM
3953 default:
3954 gcc_unreachable ();
e516294a 3955
808f4dfe
DM
3956 case RK_FRAME:
3957 case RK_GLOBALS:
3958 case RK_CODE:
3959 case RK_HEAP:
3960 case RK_STACK:
358dab90 3961 case RK_THREAD_LOCAL:
808f4dfe
DM
3962 case RK_ROOT:
3963 /* Regions that represent memory spaces are not expressible as trees. */
3964 return path_var (NULL_TREE, 0);
757bf1df 3965
808f4dfe 3966 case RK_FUNCTION:
884d9141 3967 {
808f4dfe
DM
3968 const function_region *function_reg
3969 = as_a <const function_region *> (reg);
3970 return path_var (function_reg->get_fndecl (), 0);
884d9141 3971 }
808f4dfe 3972 case RK_LABEL:
9e78634c
DM
3973 {
3974 const label_region *label_reg = as_a <const label_region *> (reg);
3975 return path_var (label_reg->get_label (), 0);
3976 }
90f7c300 3977
808f4dfe
DM
3978 case RK_SYMBOLIC:
3979 {
3980 const symbolic_region *symbolic_reg
3981 = as_a <const symbolic_region *> (reg);
3982 const svalue *pointer = symbolic_reg->get_pointer ();
3983 path_var pointer_pv = get_representative_path_var (pointer, visited);
3984 if (!pointer_pv)
3985 return path_var (NULL_TREE, 0);
3986 tree offset = build_int_cst (pointer->get_type (), 0);
3987 return path_var (build2 (MEM_REF,
3988 reg->get_type (),
3989 pointer_pv.m_tree,
3990 offset),
3991 pointer_pv.m_stack_depth);
3992 }
3993 case RK_DECL:
3994 {
3995 const decl_region *decl_reg = as_a <const decl_region *> (reg);
3996 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
3997 }
3998 case RK_FIELD:
3999 {
4000 const field_region *field_reg = as_a <const field_region *> (reg);
4001 path_var parent_pv
4002 = get_representative_path_var (reg->get_parent_region (), visited);
4003 if (!parent_pv)
4004 return path_var (NULL_TREE, 0);
4005 return path_var (build3 (COMPONENT_REF,
4006 reg->get_type (),
4007 parent_pv.m_tree,
4008 field_reg->get_field (),
4009 NULL_TREE),
4010 parent_pv.m_stack_depth);
4011 }
757bf1df 4012
808f4dfe
DM
4013 case RK_ELEMENT:
4014 {
4015 const element_region *element_reg
4016 = as_a <const element_region *> (reg);
4017 path_var parent_pv
4018 = get_representative_path_var (reg->get_parent_region (), visited);
4019 if (!parent_pv)
4020 return path_var (NULL_TREE, 0);
4021 path_var index_pv
4022 = get_representative_path_var (element_reg->get_index (), visited);
4023 if (!index_pv)
4024 return path_var (NULL_TREE, 0);
4025 return path_var (build4 (ARRAY_REF,
4026 reg->get_type (),
4027 parent_pv.m_tree, index_pv.m_tree,
4028 NULL_TREE, NULL_TREE),
4029 parent_pv.m_stack_depth);
4030 }
757bf1df 4031
808f4dfe 4032 case RK_OFFSET:
757bf1df 4033 {
808f4dfe
DM
4034 const offset_region *offset_reg
4035 = as_a <const offset_region *> (reg);
4036 path_var parent_pv
4037 = get_representative_path_var (reg->get_parent_region (), visited);
4038 if (!parent_pv)
4039 return path_var (NULL_TREE, 0);
4040 path_var offset_pv
4041 = get_representative_path_var (offset_reg->get_byte_offset (),
4042 visited);
29f5db8e 4043 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 4044 return path_var (NULL_TREE, 0);
29f5db8e
DM
4045 tree addr_parent = build1 (ADDR_EXPR,
4046 build_pointer_type (reg->get_type ()),
4047 parent_pv.m_tree);
808f4dfe
DM
4048 return path_var (build2 (MEM_REF,
4049 reg->get_type (),
29f5db8e 4050 addr_parent, offset_pv.m_tree),
808f4dfe 4051 parent_pv.m_stack_depth);
757bf1df 4052 }
757bf1df 4053
e61ffa20
DM
4054 case RK_SIZED:
4055 return path_var (NULL_TREE, 0);
4056
808f4dfe
DM
4057 case RK_CAST:
4058 {
4059 path_var parent_pv
4060 = get_representative_path_var (reg->get_parent_region (), visited);
4061 if (!parent_pv)
4062 return path_var (NULL_TREE, 0);
4063 return path_var (build1 (NOP_EXPR,
4064 reg->get_type (),
4065 parent_pv.m_tree),
4066 parent_pv.m_stack_depth);
4067 }
757bf1df 4068
808f4dfe
DM
4069 case RK_HEAP_ALLOCATED:
4070 case RK_ALLOCA:
4071 /* No good way to express heap-allocated/alloca regions as trees. */
4072 return path_var (NULL_TREE, 0);
757bf1df 4073
808f4dfe
DM
4074 case RK_STRING:
4075 {
4076 const string_region *string_reg = as_a <const string_region *> (reg);
4077 return path_var (string_reg->get_string_cst (), 0);
4078 }
757bf1df 4079
2402dc6b 4080 case RK_VAR_ARG:
358dab90 4081 case RK_ERRNO:
808f4dfe
DM
4082 case RK_UNKNOWN:
4083 return path_var (NULL_TREE, 0);
4084 }
757bf1df
DM
4085}
4086
467a4820
DM
4087/* Attempt to return a path_var that represents REG, or return
4088 the NULL path_var.
4089 For example, a region for a field of a local would be a path_var
4090 wrapping a COMPONENT_REF.
4091 Use VISITED to prevent infinite mutual recursion with the overload for
4092 svalues.
4093
4094 This function defers to get_representative_path_var_1 to do the work;
4095 it adds verification that get_representative_path_var_1 returned a tree
4096 of the correct type. */
4097
4098path_var
4099region_model::get_representative_path_var (const region *reg,
4100 svalue_set *visited) const
4101{
4102 path_var result = get_representative_path_var_1 (reg, visited);
4103
4104 /* Verify that the result has the same type as REG, if any. */
4105 if (result.m_tree && reg->get_type ())
4106 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4107
4108 return result;
4109}
4110
757bf1df
DM
4111/* Update this model for any phis in SNODE, assuming we came from
4112 LAST_CFG_SUPEREDGE. */
4113
4114void
4115region_model::update_for_phis (const supernode *snode,
4116 const cfg_superedge *last_cfg_superedge,
4117 region_model_context *ctxt)
4118{
4119 gcc_assert (last_cfg_superedge);
4120
e0a7a675
DM
4121 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4122 are effectively handled simultaneously. */
4123 const region_model old_state (*this);
4124
757bf1df
DM
4125 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4126 !gsi_end_p (gpi); gsi_next (&gpi))
4127 {
4128 gphi *phi = gpi.phi ();
4129
4130 tree src = last_cfg_superedge->get_phi_arg (phi);
4131 tree lhs = gimple_phi_result (phi);
4132
e0a7a675
DM
4133 /* Update next_state based on phi and old_state. */
4134 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
4135 }
4136}
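/* The copy of the state above matters for loops, e.g. (a sketch) a
   loop header with
     # x_3 = PHI <x_init_1(2), y_4(4)>
     # y_4 = PHI <y_init_2(2), x_3(4)>
   When coming via the back edge, both phis must be evaluated against
   the values from the previous iteration (OLD_STATE), not against the
   value just written by an earlier phi in the same block.  */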
4137
4138/* Attempt to update this model for taking EDGE (where the last statement
4139 was LAST_STMT), returning true if the edge can be taken, false
4140 otherwise.
84fb3546
DM
4141 When returning false, if OUT is non-NULL, write a new rejected_constraint
4142 to it.
757bf1df
DM
4143
4144 For CFG superedges where LAST_STMT is a conditional or a switch
4145 statement, attempt to add the relevant conditions for EDGE to this
4146 model, returning true if they are feasible, or false if they are
4147 impossible.
4148
4149 For call superedges, push frame information and store arguments
4150 into parameters.
4151
4152 For return superedges, pop frame information and store return
4153 values into any lhs.
4154
4155 Rejection of call/return superedges happens elsewhere, in
4156 program_point::on_edge (i.e. based on program point, rather
4157 than program state). */
4158
4159bool
4160region_model::maybe_update_for_edge (const superedge &edge,
4161 const gimple *last_stmt,
84fb3546
DM
4162 region_model_context *ctxt,
4163 rejected_constraint **out)
757bf1df
DM
4164{
4165 /* Handle frame updates for interprocedural edges. */
4166 switch (edge.m_kind)
4167 {
4168 default:
4169 break;
4170
4171 case SUPEREDGE_CALL:
4172 {
4173 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4174 update_for_call_superedge (*call_edge, ctxt);
4175 }
4176 break;
4177
4178 case SUPEREDGE_RETURN:
4179 {
4180 const return_superedge *return_edge
4181 = as_a <const return_superedge *> (&edge);
4182 update_for_return_superedge (*return_edge, ctxt);
4183 }
4184 break;
4185
4186 case SUPEREDGE_INTRAPROCEDURAL_CALL:
bfca9505
DM
4187 /* This is a no-op for call summaries; we should already
4188 have handled the effect of the call summary at the call stmt. */
757bf1df
DM
4189 break;
4190 }
4191
4192 if (last_stmt == NULL)
4193 return true;
4194
4195 /* Apply any constraints for conditionals/switch statements. */
4196
4197 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4198 {
4199 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 4200 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
4201 }
4202
4203 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4204 {
4205 const switch_cfg_superedge *switch_sedge
4206 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
4207 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4208 ctxt, out);
757bf1df
DM
4209 }
4210
1690a839
DM
4211 /* Apply any constraints due to an exception being thrown. */
4212 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4213 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 4214 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 4215
757bf1df
DM
4216 return true;
4217}
4218
4219/* Push a new frame_region on to the stack region.
4220 Populate the frame_region with child regions for the function call's
4221 parameters, using values from the arguments at the callsite in the
4222 caller's frame. */
4223
4224void
aef703cf 4225region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
4226 region_model_context *ctxt,
4227 function *callee)
757bf1df 4228{
808f4dfe 4229 /* Build a vec of argument svalues, using the current top
757bf1df 4230 frame for resolving tree expressions. */
808f4dfe 4231 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
4232
4233 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4234 {
4235 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 4236 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
4237 }
4238
e92d0ff6
AS
4239 if (!callee)
4240 {
4241 /* Get the function * from the gcall. */
4242 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
4243 callee = DECL_STRUCT_FUNCTION (fn_decl);
4244 }
4245
4246 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
4247}
4248
a96f1c38
DM
4249/* Pop the top-most frame_region from the stack, and copy the return
4250 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 4251 the call (if any). */
aef703cf 4252
757bf1df 4253void
aef703cf
AS
4254region_model::update_for_return_gcall (const gcall *call_stmt,
4255 region_model_context *ctxt)
757bf1df 4256{
4cebae09
DM
4257 /* Get the lvalue for the result of the call, passing it to pop_frame,
4258 so that pop_frame can determine the region with respect to the
4259 *caller* frame. */
757bf1df 4260 tree lhs = gimple_call_lhs (call_stmt);
4cebae09 4261 pop_frame (lhs, NULL, ctxt);
757bf1df
DM
4262}
4263
aef703cf
AS
4264/* Extract calling information from the superedge and update the model for the
4265 call. */
4266
4267void
4268region_model::update_for_call_superedge (const call_superedge &call_edge,
4269 region_model_context *ctxt)
4270{
4271 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 4272 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
4273}
4274
4275/* Extract calling information from the return superedge and update the model
4276 for the returning call. */
4277
4278void
4279region_model::update_for_return_superedge (const return_superedge &return_edge,
4280 region_model_context *ctxt)
4281{
4282 const gcall *call_stmt = return_edge.get_call_stmt ();
4283 update_for_return_gcall (call_stmt, ctxt);
4284}
4285
bfca9505
DM
4286/* Attempt to use R to replay SUMMARY into this object.
4287 Return true if it is possible. */
757bf1df 4288
bfca9505
DM
4289bool
4290region_model::replay_call_summary (call_summary_replay &r,
4291 const region_model &summary)
757bf1df 4292{
bfca9505
DM
4293 gcc_assert (summary.get_stack_depth () == 1);
4294
4295 m_store.replay_call_summary (r, summary.m_store);
757bf1df 4296
bfca9505
DM
4297 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4298 return false;
4299
4300 for (auto kv : summary.m_dynamic_extents)
4301 {
4302 const region *summary_reg = kv.first;
4303 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4304 if (!caller_reg)
4305 continue;
4306 const svalue *summary_sval = kv.second;
4307 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4308 if (!caller_sval)
4309 continue;
4310 m_dynamic_extents.put (caller_reg, caller_sval);
4311 }
4312
4313 return true;
757bf1df
DM
4314}
4315
4316/* Given a true or false edge guarded by conditional statement COND_STMT,
4317 determine appropriate constraints for the edge to be taken.
4318
4319 If they are feasible, add the constraints and return true.
4320
4321 Return false if the constraints contradict existing knowledge
84fb3546
DM
4322 (and so the edge should not be taken).
4323 When returning false, if OUT is non-NULL, write a new rejected_constraint
4324 to it. */
757bf1df
DM
4325
4326bool
4327region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4328 const gcond *cond_stmt,
84fb3546
DM
4329 region_model_context *ctxt,
4330 rejected_constraint **out)
757bf1df
DM
4331{
4332 ::edge cfg_edge = sedge.get_cfg_edge ();
4333 gcc_assert (cfg_edge != NULL);
4334 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4335
4336 enum tree_code op = gimple_cond_code (cond_stmt);
4337 tree lhs = gimple_cond_lhs (cond_stmt);
4338 tree rhs = gimple_cond_rhs (cond_stmt);
4339 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4340 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 4341 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
4342}
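/* e.g. (a sketch): for
     if (x > 3)
   the true edge adds the constraint "x > 3", whereas the false edge
   inverts the comparison and adds "x <= 3".  */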
4343
ccd4df81
DM
4344/* Return true iff SWITCH_STMT has a non-default label that contains
4345 INT_CST. */
4346
4347static bool
4348has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
4349{
4350 /* We expect the initial label to be the default; skip it. */
4351 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
4352 unsigned min_idx = 1;
4353 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
4354
4355 /* Binary search: try to find the label containing INT_CST.
4356 This requires the cases to be sorted by CASE_LOW (done by the
4357 gimplifier). */
4358 while (max_idx >= min_idx)
4359 {
4360 unsigned case_idx = (min_idx + max_idx) / 2;
4361 tree label = gimple_switch_label (switch_stmt, case_idx);
4362 tree low = CASE_LOW (label);
4363 gcc_assert (low);
4364 tree high = CASE_HIGH (label);
4365 if (!high)
4366 high = low;
4367 if (tree_int_cst_compare (int_cst, low) < 0)
4368 {
4369 /* INT_CST is below the range of this label. */
4370 gcc_assert (case_idx > 0);
4371 max_idx = case_idx - 1;
4372 }
4373 else if (tree_int_cst_compare (int_cst, high) > 0)
4374 {
4375 /* INT_CST is above the range of this case. */
4376 min_idx = case_idx + 1;
4377 }
4378 else
4379 /* This case contains INT_CST. */
4380 return true;
4381 }
4382 /* Not found. */
4383 return false;
4384}
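/* Sketch of the search above: for
     switch (v) { default: ...; case 1: ...; case 5 ... 7: ...; }
   the sorted non-default labels cover [1, 1] and [5, 7]; probing for 6
   first visits [1, 1] (6 > 1, so search higher), then matches [5, 7]
   since 5 <= 6 <= 7.  */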
4385
4386/* Return true iff SWITCH_STMT (which must be on an enum value)
4387 has nondefault cases handling all values in the enum. */
4388
4389static bool
4390has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4391{
4392 gcc_assert (switch_stmt);
4393 tree type = TREE_TYPE (gimple_switch_index (switch_stmt));
4394 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
4395
4396 for (tree enum_val_iter = TYPE_VALUES (type);
4397 enum_val_iter;
4398 enum_val_iter = TREE_CHAIN (enum_val_iter))
4399 {
4400 tree enum_val = TREE_VALUE (enum_val_iter);
4401 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
4402 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
4403 if (!has_nondefault_case_for_value_p (switch_stmt,
4404 DECL_INITIAL (enum_val)))
4405 return false;
4406 }
4407 return true;
4408}
4409
757bf1df
DM
4410/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4411 for the edge to be taken.
4412
4413 If they are feasible, add the constraints and return true.
4414
4415 Return false if the constraints contradict existing knowledge
84fb3546
DM
4416 (and so the edge should not be taken).
4417 When returning false, if OUT is non-NULL, write a new rejected_constraint
4418 to it. */
757bf1df
DM
4419
4420bool
4421region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4422 const gswitch *switch_stmt,
84fb3546
DM
4423 region_model_context *ctxt,
4424 rejected_constraint **out)
757bf1df 4425{
ccd4df81
DM
4426 tree index = gimple_switch_index (switch_stmt);
4427 const svalue *index_sval = get_rvalue (index, ctxt);
4428
4429 /* If we're switching based on an enum type, assume that the user is only
4430 working with values from the enum. Hence if this is an
4431 implicitly-created "default", assume it doesn't get followed.
4432 This fixes numerous "uninitialized" false positives where we otherwise
4433 consider jumping past the initialization cases. */
4434
4435 if (/* Don't check during feasibility-checking (when ctxt is NULL). */
4436 ctxt
4437 /* Must be an enum value. */
4438 && index_sval->get_type ()
4439 && TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE
4440 && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
4441 /* If we have a constant, then we can check it directly. */
4442 && index_sval->get_kind () != SK_CONSTANT
4443 && edge.implicitly_created_default_p ()
4444 && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
4445 /* Don't do this if there's a chance that the index is
4446 attacker-controlled. */
4447 && !ctxt->possibly_tainted_p (index_sval))
4448 {
4449 if (out)
4450 *out = new rejected_default_case (*this);
4451 return false;
4452 }
4453
8ca7fa84
DM
4454 bounded_ranges_manager *ranges_mgr = get_range_manager ();
4455 const bounded_ranges *all_cases_ranges
4456 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
8ca7fa84
DM
4457 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
4458 if (!sat && out)
4459 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
2c044ff1
DM
4460 if (sat && ctxt && !all_cases_ranges->empty_p ())
4461 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
8ca7fa84 4462 return sat;
757bf1df
DM
4463}
4464
1690a839
DM
4465/* Apply any constraints due to an exception being thrown at LAST_STMT.
4466
4467 If they are feasible, add the constraints and return true.
4468
4469 Return false if the constraints contradict existing knowledge
84fb3546
DM
4470 (and so the edge should not be taken).
4471 When returning false, if OUT is non-NULL, write a new rejected_constraint
4472 to it. */
1690a839
DM
4473
4474bool
4475region_model::apply_constraints_for_exception (const gimple *last_stmt,
84fb3546
DM
4476 region_model_context *ctxt,
4477 rejected_constraint **out)
1690a839
DM
4478{
4479 gcc_assert (last_stmt);
4480 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4481 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4482 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4483 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4484 {
4485 /* We have an exception thrown from operator new.
4486 Add a constraint that the result was NULL, to avoid a false
4487 leak report due to the result being lost when following
4488 the EH edge. */
4489 if (tree lhs = gimple_call_lhs (call))
84fb3546 4490 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
4491 return true;
4492 }
4493 return true;
4494}
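/* e.g. (a sketch): for
     p = new int;
   where "operator new" throws, following the EH edge adds the
   constraint "p == NULL", so the allocation is not misreported as
   leaking when the result is lost along that path.  */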
4495
808f4dfe
DM
4496/* For use with push_frame when handling a top-level call within the analysis.
4497 PARAM has a defined but unknown initial value.
4498 Anything it points to has escaped, since the calling context "knows"
4499 the pointer, and thus calls to unknown functions could read/write into
dcfc7ac9
DM
4500 the region.
4501 If NONNULL is true, then assume that PARAM must be non-NULL. */
757bf1df
DM
4502
4503void
808f4dfe 4504region_model::on_top_level_param (tree param,
dcfc7ac9
DM
4505 bool nonnull,
4506 region_model_context *ctxt)
757bf1df 4507{
808f4dfe 4508 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 4509 {
808f4dfe
DM
4510 const region *param_reg = get_lvalue (param, ctxt);
4511 const svalue *init_ptr_sval
4512 = m_mgr->get_or_create_initial_value (param_reg);
4513 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4514 m_store.mark_as_escaped (pointee_reg);
dcfc7ac9
DM
4515 if (nonnull)
4516 {
4517 const svalue *null_ptr_sval
4518 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
4519 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
4520 }
5eae0ac7 4521 }
757bf1df
DM
4522}
4523
808f4dfe
DM
4524/* Update this region_model to reflect pushing a frame onto the stack
4525 for a call to FUN.
757bf1df 4526
808f4dfe
DM
4527 If ARG_SVALS is non-NULL, use it to populate the parameters
4528 in the new frame.
4529 Otherwise, the params have their initial_svalues.
757bf1df 4530
808f4dfe 4531 Return the frame_region for the new frame. */
757bf1df 4532
808f4dfe
DM
4533const region *
4534region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
4535 region_model_context *ctxt)
757bf1df 4536{
808f4dfe
DM
4537 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
4538 if (arg_svals)
757bf1df 4539 {
808f4dfe
DM
4540 /* Arguments supplied from a caller frame. */
4541 tree fndecl = fun->decl;
4542 unsigned idx = 0;
4543 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4544 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 4545 {
808f4dfe
DM
4546 /* If there's a mismatching declaration, the call stmt might
4547 not have enough args. Handle this case by leaving the
4548 rest of the params as uninitialized. */
4549 if (idx >= arg_svals->length ())
4550 break;
294b6da2
DM
4551 tree parm_lval = iter_parm;
4552 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4553 parm_lval = parm_default_ssa;
4554 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 4555 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 4556 set_value (parm_reg, arg_sval, ctxt);
757bf1df 4557 }
2402dc6b
DM
4558
4559 /* Handle any variadic args. */
4560 unsigned va_arg_idx = 0;
4561 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
4562 {
4563 const svalue *arg_sval = (*arg_svals)[idx];
4564 const region *var_arg_reg
4565 = m_mgr->get_var_arg_region (m_current_frame,
4566 va_arg_idx);
4567 set_value (var_arg_reg, arg_sval, ctxt);
4568 }
757bf1df 4569 }
808f4dfe 4570 else
757bf1df 4571 {
808f4dfe
DM
4572 /* Otherwise we have a top-level call within the analysis. The params
4573 have defined but unknown initial values.
4574 Anything they point to has escaped. */
4575 tree fndecl = fun->decl;
dcfc7ac9
DM
4576
4577 /* Handle "__attribute__((nonnull))". */
4578 tree fntype = TREE_TYPE (fndecl);
4579 bitmap nonnull_args = get_nonnull_args (fntype);
4580
4581 unsigned parm_idx = 0;
808f4dfe
DM
4582 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4583 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 4584 {
dcfc7ac9
DM
4585 bool non_null = (nonnull_args
4586 ? (bitmap_empty_p (nonnull_args)
4587 || bitmap_bit_p (nonnull_args, parm_idx))
4588 : false);
294b6da2 4589 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
dcfc7ac9 4590 on_top_level_param (parm_default_ssa, non_null, ctxt);
294b6da2 4591 else
dcfc7ac9
DM
4592 on_top_level_param (iter_parm, non_null, ctxt);
4593 parm_idx++;
757bf1df 4594 }
dcfc7ac9
DM
4595
4596 BITMAP_FREE (nonnull_args);
757bf1df 4597 }
757bf1df 4598
808f4dfe 4599 return m_current_frame;
757bf1df
DM
4600}
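/* e.g. (an assumed illustration): when analyzing
     __attribute__((nonnull (1)))
     void test (int *p, int *q);
   as a top-level entrypoint, whatever "p" and "q" point to is marked
   as escaped, and "p != NULL" is additionally constrained via the
   nonnull bitmap above.  */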
4601
808f4dfe
DM
4602/* Get the function of the top-most frame in this region_model's stack.
4603 There must be such a frame. */
757bf1df 4604
808f4dfe
DM
4605function *
4606region_model::get_current_function () const
757bf1df 4607{
808f4dfe
DM
4608 const frame_region *frame = get_current_frame ();
4609 gcc_assert (frame);
4610 return frame->get_function ();
757bf1df
DM
4611}
4612
808f4dfe 4613/* Pop the topmost frame_region from this region_model's stack;
757bf1df 4614
4cebae09
DM
4615 If RESULT_LVALUE is non-null, copy any return value from the frame
4616 into the corresponding region (evaluated with respect to the *caller*
4617 frame, rather than the called frame).
808f4dfe
DM
4618 If OUT_RESULT is non-null, copy any return value from the frame
4619 into *OUT_RESULT.
757bf1df 4620
808f4dfe
DM
4621 Purge the frame region and all its descendent regions.
4622 Convert any pointers that point into such regions into
4623 POISON_KIND_POPPED_STACK svalues. */
757bf1df 4624
808f4dfe 4625void
4cebae09 4626region_model::pop_frame (tree result_lvalue,
808f4dfe
DM
4627 const svalue **out_result,
4628 region_model_context *ctxt)
4629{
4630 gcc_assert (m_current_frame);
757bf1df 4631
808f4dfe 4632 const frame_region *frame_reg = m_current_frame;
5c6546ca
DM
4633
4634 /* Notify state machines. */
4635 if (ctxt)
4636 ctxt->on_pop_frame (frame_reg);
4637
4638 /* Evaluate the result, within the callee frame. */
808f4dfe
DM
4639 tree fndecl = m_current_frame->get_function ()->decl;
4640 tree result = DECL_RESULT (fndecl);
4cebae09 4641 const svalue *retval = NULL;
808f4dfe
DM
4642 if (result && TREE_TYPE (result) != void_type_node)
4643 {
4cebae09 4644 retval = get_rvalue (result, ctxt);
808f4dfe 4645 if (out_result)
13ad6d9f 4646 *out_result = retval;
808f4dfe 4647 }
757bf1df 4648
808f4dfe
DM
4649 /* Pop the frame. */
4650 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 4651
4cebae09
DM
4652 if (result_lvalue && retval)
4653 {
4654 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4655 the frame, but before poisoning pointers into the old frame. */
4656 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4657 set_value (result_dst_reg, retval, ctxt);
4658 }
4659
808f4dfe 4660 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
757bf1df
DM
4661}
4662
808f4dfe 4663/* Get the number of frames in this region_model's stack. */
757bf1df 4664
808f4dfe
DM
4665int
4666region_model::get_stack_depth () const
757bf1df 4667{
808f4dfe
DM
4668 const frame_region *frame = get_current_frame ();
4669 if (frame)
4670 return frame->get_stack_depth ();
4671 else
4672 return 0;
757bf1df
DM
4673}
4674
808f4dfe
DM
4675/* Get the frame_region with the given index within the stack.
4676 The frame_region must exist. */
757bf1df 4677
808f4dfe
DM
4678const frame_region *
4679region_model::get_frame_at_index (int index) const
757bf1df 4680{
808f4dfe
DM
4681 const frame_region *frame = get_current_frame ();
4682 gcc_assert (frame);
4683 gcc_assert (index >= 0);
4684 gcc_assert (index <= frame->get_index ());
4685 while (index != frame->get_index ())
4686 {
4687 frame = frame->get_calling_frame ();
4688 gcc_assert (frame);
4689 }
4690 return frame;
757bf1df
DM
4691}
4692
808f4dfe
DM
4693/* Unbind svalues for any regions in REG and below.
4694 Find any pointers to such regions; convert them to
9a2c9579
DM
4695 poisoned values of kind PKIND.
4696 Also purge any dynamic extents. */
757bf1df 4697
808f4dfe
DM
4698void
4699region_model::unbind_region_and_descendents (const region *reg,
4700 enum poison_kind pkind)
757bf1df 4701{
808f4dfe
DM
4702 /* Gather a set of base regions to be unbound. */
4703 hash_set<const region *> base_regs;
4704 for (store::cluster_map_t::iterator iter = m_store.begin ();
4705 iter != m_store.end (); ++iter)
757bf1df 4706 {
808f4dfe
DM
4707 const region *iter_base_reg = (*iter).first;
4708 if (iter_base_reg->descendent_of_p (reg))
4709 base_regs.add (iter_base_reg);
757bf1df 4710 }
808f4dfe
DM
4711 for (hash_set<const region *>::iterator iter = base_regs.begin ();
4712 iter != base_regs.end (); ++iter)
4713 m_store.purge_cluster (*iter);
757bf1df 4714
808f4dfe
DM
4715 /* Find any pointers to REG or its descendents; convert to poisoned. */
4716 poison_any_pointers_to_descendents (reg, pkind);
9a2c9579
DM
4717
4718 /* Purge dynamic extents of any base regions in REG and below
4719 (e.g. VLAs and alloca stack regions). */
4720 for (auto iter : m_dynamic_extents)
4721 {
4722 const region *iter_reg = iter.first;
4723 if (iter_reg->descendent_of_p (reg))
4724 unset_dynamic_extents (iter_reg);
4725 }
757bf1df
DM
4726}
4727
808f4dfe
DM
4728/* Implementation of BindingVisitor.
4729 Update the bound svalues for regions below REG to use poisoned
4730 values instead. */
757bf1df 4731
808f4dfe 4732struct bad_pointer_finder
757bf1df 4733{
808f4dfe
DM
4734 bad_pointer_finder (const region *reg, enum poison_kind pkind,
4735 region_model_manager *mgr)
4736 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
4737 {}
757bf1df 4738
808f4dfe
DM
4739 void on_binding (const binding_key *, const svalue *&sval)
4740 {
4741 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4742 {
4743 const region *ptr_dst = ptr_sval->get_pointee ();
4744 /* Poison ptrs to descendents of REG, but not to REG itself,
4745 otherwise double-free detection doesn't work (since sm-state
4746 for "free" is stored on the original ptr svalue). */
4747 if (ptr_dst->descendent_of_p (m_reg)
4748 && ptr_dst != m_reg)
4749 {
4750 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4751 sval->get_type ());
4752 ++m_count;
4753 }
4754 }
4755 }
757bf1df 4756
808f4dfe
DM
4757 const region *m_reg;
4758 enum poison_kind m_pkind;
4759 region_model_manager *const m_mgr;
4760 int m_count;
4761};
757bf1df 4762
808f4dfe
DM
4763/* Find any pointers to REG or its descendents; convert them to
4764 poisoned values of kind PKIND.
4765 Return the number of pointers that were poisoned. */
757bf1df 4766
808f4dfe
DM
4767int
4768region_model::poison_any_pointers_to_descendents (const region *reg,
4769 enum poison_kind pkind)
4770{
4771 bad_pointer_finder bv (reg, pkind, m_mgr);
4772 m_store.for_each_binding (bv);
4773 return bv.m_count;
757bf1df
DM
4774}
4775
808f4dfe
DM
4776/* Attempt to merge THIS with OTHER_MODEL, writing the result
4777 to OUT_MODEL. Use POINT to distinguish values created as a
4778 result of merging. */
757bf1df 4779
808f4dfe
DM
4780bool
4781region_model::can_merge_with_p (const region_model &other_model,
4782 const program_point &point,
f573d351
DM
4783 region_model *out_model,
4784 const extrinsic_state *ext_state,
4785 const program_state *state_a,
4786 const program_state *state_b) const
757bf1df 4787{
808f4dfe
DM
4788 gcc_assert (out_model);
4789 gcc_assert (m_mgr == other_model.m_mgr);
4790 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 4791
808f4dfe
DM
4792 if (m_current_frame != other_model.m_current_frame)
4793 return false;
4794 out_model->m_current_frame = m_current_frame;
757bf1df 4795
f573d351
DM
4796 model_merger m (this, &other_model, point, out_model,
4797 ext_state, state_a, state_b);
757bf1df 4798
808f4dfe
DM
4799 if (!store::can_merge_p (&m_store, &other_model.m_store,
4800 &out_model->m_store, m_mgr->get_store_manager (),
4801 &m))
4802 return false;
4803
9a2c9579
DM
4804 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4805 &out_model->m_dynamic_extents))
4806 return false;
4807
808f4dfe
DM
4808 /* Merge constraints. */
4809 constraint_manager::merge (*m_constraints,
4810 *other_model.m_constraints,
c710051a 4811 out_model->m_constraints);
757bf1df 4812
808f4dfe 4813 return true;
757bf1df
DM
4814}
4815
4816/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4817 otherwise. */
4818
4819tree
4820region_model::get_fndecl_for_call (const gcall *call,
4821 region_model_context *ctxt)
4822{
4823 tree fn_ptr = gimple_call_fn (call);
4824 if (fn_ptr == NULL_TREE)
4825 return NULL_TREE;
808f4dfe
DM
4826 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4827 if (const region_svalue *fn_ptr_ptr
4828 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 4829 {
808f4dfe
DM
4830 const region *reg = fn_ptr_ptr->get_pointee ();
4831 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 4832 {
808f4dfe 4833 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
4834 cgraph_node *node = cgraph_node::get (fn_decl);
4835 if (!node)
4836 return NULL_TREE;
4837 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
4838 if (ultimate_node)
4839 return ultimate_node->decl;
757bf1df
DM
4840 }
4841 }
4842
4843 return NULL_TREE;
4844}
4845
808f4dfe 4846/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 4847
faacafd2 4848struct append_regions_cb_data
757bf1df 4849{
808f4dfe
DM
4850 const region_model *model;
4851 auto_vec<const decl_region *> *out;
4852};
757bf1df 4853
faacafd2 4854/* Populate *OUT with all decl_regions in the current
808f4dfe 4855 frame that have clusters within the store. */
757bf1df
DM
4856
4857void
808f4dfe 4858region_model::
faacafd2 4859get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
757bf1df 4860{
faacafd2 4861 append_regions_cb_data data;
808f4dfe
DM
4862 data.model = this;
4863 data.out = out;
faacafd2 4864 m_store.for_each_cluster (append_regions_cb, &data);
757bf1df
DM
4865}
4866
faacafd2 4867/* Implementation detail of get_regions_for_current_frame. */
757bf1df 4868
808f4dfe 4869void
faacafd2
DM
4870region_model::append_regions_cb (const region *base_reg,
4871 append_regions_cb_data *cb_data)
757bf1df 4872{
808f4dfe
DM
4873 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4874 return;
4875 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
faacafd2 4876 cb_data->out->safe_push (decl_reg);
757bf1df
DM
4877}
4878
c83e9731
TL
4879
4880/* Abstract class for diagnostics related to the use of
4881 floating-point arithmetic where precision is needed. */
4882
4883class imprecise_floating_point_arithmetic : public pending_diagnostic
4884{
4885public:
4886 int get_controlling_option () const final override
4887 {
4888 return OPT_Wanalyzer_imprecise_fp_arithmetic;
4889 }
4890};
4891
4892/* Concrete diagnostic to complain about uses of floating-point arithmetic
4893 in the size argument of malloc etc. */
4894
4895class float_as_size_arg : public imprecise_floating_point_arithmetic
4896{
4897public:
4898 float_as_size_arg (tree arg) : m_arg (arg)
4899 {}
4900
4901 const char *get_kind () const final override
4902 {
4903 return "float_as_size_arg_diagnostic";
4904 }
4905
ac9230fb 4906 bool subclass_equal_p (const pending_diagnostic &other) const final override
c83e9731
TL
4907 {
4908 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
4909 }
4910
4911 bool emit (rich_location *rich_loc) final override
4912 {
4913 diagnostic_metadata m;
4914 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
4915 "use of floating-point arithmetic here might"
4916 " yield unexpected results");
4917 if (warned)
4918 inform (rich_loc->get_loc (), "only use operands of an integer type"
4919 " inside the size argument");
4920 return warned;
4921 }
4922
4923 label_text describe_final_event (const evdesc::final_event &ev) final
4924 override
4925 {
4926 if (m_arg)
4927 return ev.formatted_print ("operand %qE is of type %qT",
4928 m_arg, TREE_TYPE (m_arg));
4929 return ev.formatted_print ("at least one operand of the size argument is"
4930 " of a floating-point type");
4931 }
4932
4933private:
4934 tree m_arg;
4935};
4936
4937/* Visitor to find uses of floating-point variables/constants in an svalue. */
4938
4939class contains_floating_point_visitor : public visitor
4940{
4941public:
4942 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
4943 {
4944 root_sval->accept (this);
4945 }
4946
4947 const svalue *get_svalue_to_report ()
4948 {
4949 return m_result;
4950 }
4951
4952 void visit_constant_svalue (const constant_svalue *sval) final override
4953 {
4954 /* At the point the analyzer runs, constant integer operands in a floating
4955 point expression have already been implicitly converted to floating-point.
4956 Thus, we prefer to report non-constants such that the diagnostic
4957 always reports a floating-point operand. */
4958 tree type = sval->get_type ();
4959 if (type && FLOAT_TYPE_P (type) && !m_result)
4960 m_result = sval;
4961 }
4962
4963 void visit_conjured_svalue (const conjured_svalue *sval) final override
4964 {
4965 tree type = sval->get_type ();
4966 if (type && FLOAT_TYPE_P (type))
4967 m_result = sval;
4968 }
4969
4970 void visit_initial_svalue (const initial_svalue *sval) final override
4971 {
4972 tree type = sval->get_type ();
4973 if (type && FLOAT_TYPE_P (type))
4974 m_result = sval;
4975 }
4976
4977private:
4978 /* Non-null if at least one floating-point operand was found. */
4979 const svalue *m_result;
4980};
4981
4982/* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
4983
4984void
4985region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
4986 region_model_context *ctxt) const
4987{
4988 gcc_assert (ctxt);
4989
4990 contains_floating_point_visitor v (size_in_bytes);
4991 if (const svalue *float_sval = v.get_svalue_to_report ())
4992 {
4993 tree diag_arg = get_representative_tree (float_sval);
6341f14e 4994 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
c83e9731
TL
4995 }
4996}
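/* e.g. (a sketch): the check above would complain about
     void *p = malloc (n * 1.5);
   since the size computation involves a floating-point operand.  */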
4997
ce917b04
DM
4998/* Return a region describing a heap-allocated block of memory.
4999 Use CTXT to complain about tainted sizes.
5000
5001 Reuse an existing heap_allocated_region if it's not being referenced by
5002 this region_model; otherwise create a new one. */
757bf1df 5003
808f4dfe 5004const region *
ce917b04
DM
5005region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
5006 region_model_context *ctxt)
5007{
5008 /* Determine which regions are referenced in this region_model, so that
5009 we can reuse an existing heap_allocated_region if it's not in use on
5010 this path. */
7dc0ecaf 5011 auto_bitmap base_regs_in_use;
ce917b04
DM
5012 get_referenced_base_regions (base_regs_in_use);
5013 const region *reg
5014 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
688fc162
DM
5015 if (size_in_bytes)
5016 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5017 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 5018 return reg;
5019}
5020
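/* Usage sketch for the above (modeled on how the malloc-family handlers
   use it; simplified and not compiled here, with "cd" assumed to be the
   call_details of the allocation call):

     const region *new_reg
       = model->get_or_create_region_for_heap_alloc (size_sval, ctxt);
     const svalue *ptr_sval
       = mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
     cd.maybe_set_lhs (ptr_sval);

   i.e. obtain a heap-allocated region of the given symbolic size, then
   bind a pointer to it to the call's LHS.  */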
5021/* Populate OUT_IDS with the set of IDs of those base regions which are
5022 reachable in this region_model. */
5023
5024void
7dc0ecaf 5025region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
5026{
5027 reachable_regions reachable_regs (const_cast<region_model *> (this));
5028 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
5029 &reachable_regs);
5030 /* Get regions for locals that have explicitly bound values. */
5031 for (store::cluster_map_t::iterator iter = m_store.begin ();
5032 iter != m_store.end (); ++iter)
5033 {
5034 const region *base_reg = (*iter).first;
5035 if (const region *parent = base_reg->get_parent_region ())
5036 if (parent->get_kind () == RK_FRAME)
5037 reachable_regs.add (base_reg, false);
5038 }
5039
5040 bitmap_clear (out_ids);
5041 for (auto iter_reg : reachable_regs)
5042 bitmap_set_bit (out_ids, iter_reg->get_id ());
5043}
5044
808f4dfe 5045/* Return a new region describing a block of memory allocated within the
5046 current frame.
5047 Use CTXT to complain about tainted sizes. */
757bf1df 5048
808f4dfe 5049const region *
5050region_model::create_region_for_alloca (const svalue *size_in_bytes,
5051 region_model_context *ctxt)
757bf1df 5052{
808f4dfe 5053 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
ea4e3218 5054 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 5055 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 5056 return reg;
5057}
5058
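/* Usage sketch for the above (loosely following the __builtin_alloca
   handling; simplified and not compiled here):

     const region *new_reg
       = model->create_region_for_alloca (size_sval, ctxt);
     const svalue *ptr_sval
       = mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);

   yielding a block within the current frame, whose lifetime ends when
   that frame is popped.  */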
5059/* Record that the size of REG is SIZE_IN_BYTES.
5060 Use CTXT to complain about tainted sizes. */
5061
5062void
9a2c9579 5063region_model::set_dynamic_extents (const region *reg,
5064 const svalue *size_in_bytes,
5065 region_model_context *ctxt)
5066{
5067 assert_compat_types (size_in_bytes->get_type (), size_type_node);
b9365b93 5068 if (ctxt)
5069 {
5070 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5071 ctxt);
5072 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5073 }
5074 m_dynamic_extents.put (reg, size_in_bytes);
5075}
5076
5077/* Get the recorded dynamic size of REG in bytes, or NULL if no dynamic
5078 size was recorded. */
5079
5080const svalue *
5081region_model::get_dynamic_extents (const region *reg) const
757bf1df 5082{
5083 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5084 return *slot;
5085 return NULL;
5086}
5087
5088/* Unset any recorded dynamic size of REG. */
5089
5090void
5091region_model::unset_dynamic_extents (const region *reg)
5092{
5093 m_dynamic_extents.remove (reg);
5094}
5095
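/* Sketch of the dynamic-extents lifecycle (illustrative only):

     set_dynamic_extents (reg, size_sval, ctxt);    // e.g. at an allocation
     const svalue *sz = get_dynamic_extents (reg);  // e.g. for bounds checks
     unset_dynamic_extents (reg);                   // e.g. when REG is freed
                                                    //   or reallocated

   so the recorded size tracks the region from allocation to release.  */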
5096/* Information about the layout of a RECORD_TYPE, capturing it as a vector
5097 of items, where each item is either a field or padding. */
5098
5099class record_layout
5100{
5101public:
5102 /* An item within a record; either a field, or padding after a field. */
5103 struct item
5104 {
5105 public:
5106 item (const bit_range &br,
5107 tree field,
5108 bool is_padding)
5109 : m_bit_range (br),
5110 m_field (field),
5111 m_is_padding (is_padding)
5112 {
5113 }
5114
5115 bit_offset_t get_start_bit_offset () const
5116 {
5117 return m_bit_range.get_start_bit_offset ();
5118 }
5119 bit_offset_t get_next_bit_offset () const
5120 {
5121 return m_bit_range.get_next_bit_offset ();
5122 }
5123
5124 bool contains_p (bit_offset_t offset) const
5125 {
5126 return m_bit_range.contains_p (offset);
5127 }
5128
5129 void dump_to_pp (pretty_printer *pp) const
5130 {
5131 if (m_is_padding)
5132 pp_printf (pp, "padding after %qD", m_field);
5133 else
5134 pp_printf (pp, "%qD", m_field);
5135 pp_string (pp, ", ");
5136 m_bit_range.dump_to_pp (pp);
5137 }
5138
5139 bit_range m_bit_range;
5140 tree m_field;
5141 bool m_is_padding;
5142 };
5143
5144 record_layout (tree record_type)
5145 {
5146 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5147
5148 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5149 iter = DECL_CHAIN (iter))
5150 {
5151 if (TREE_CODE (iter) == FIELD_DECL)
5152 {
5153 int iter_field_offset = int_bit_position (iter);
5154 bit_size_t size_in_bits;
5155 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5156 size_in_bits = 0;
5157
5158 maybe_pad_to (iter_field_offset);
5159
5160 /* Add field. */
5161 m_items.safe_push (item (bit_range (iter_field_offset,
5162 size_in_bits),
5163 iter, false));
5164 }
5165 }
5166
5167 /* Add any trailing padding. */
5168 bit_size_t size_in_bits;
5169 if (int_size_in_bits (record_type, &size_in_bits))
5170 maybe_pad_to (size_in_bits);
5171 }
5172
5173 void dump_to_pp (pretty_printer *pp) const
5174 {
5175 unsigned i;
5176 item *it;
5177 FOR_EACH_VEC_ELT (m_items, i, it)
5178 {
5179 it->dump_to_pp (pp);
5180 pp_newline (pp);
5181 }
5182 }
5183
5184 DEBUG_FUNCTION void dump () const
5185 {
5186 pretty_printer pp;
5187 pp_format_decoder (&pp) = default_tree_printer;
5188 pp.buffer->stream = stderr;
5189 dump_to_pp (&pp);
5190 pp_flush (&pp);
5191 }
5192
5193 const record_layout::item *get_item_at (bit_offset_t offset) const
5194 {
5195 unsigned i;
5196 item *it;
5197 FOR_EACH_VEC_ELT (m_items, i, it)
5198 if (it->contains_p (offset))
5199 return it;
5200 return NULL;
5201 }
5202
5203private:
5204 /* Subroutine of ctor. Add a padding item up to NEXT_OFFSET if needed. */
5205
5206 void maybe_pad_to (bit_offset_t next_offset)
5207 {
5208 if (m_items.length () > 0)
5209 {
5210 const item &last_item = m_items[m_items.length () - 1];
5211 bit_offset_t offset_after_last_item
5212 = last_item.get_next_bit_offset ();
5213 if (next_offset > offset_after_last_item)
5214 {
5215 bit_size_t padding_size
5216 = next_offset - offset_after_last_item;
5217 m_items.safe_push (item (bit_range (offset_after_last_item,
5218 padding_size),
5219 last_item.m_field, true));
5220 }
5221 }
5222 }
5223
5224 auto_vec<item> m_items;
5225};
5226
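/* For example (a sketch; the exact layout is target-dependent), given:

     struct s { char c; int i; };

   on a typical target with 8-bit chars and 32-bit ints the vector of
   items would be roughly:

     c, bits 0-7
     padding after c, bits 8-31
     i, bits 32-63

   which lets complain_about_uninit_ranges below name the field (or the
   padding after it) that a given uninitialized bit range falls in.  */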
5227/* A subclass of pending_diagnostic for complaining about uninitialized data
5228 being copied across a trust boundary to an untrusted output
5229 (e.g. copy_to_user infoleaks in the Linux kernel). */
5230
5231class exposure_through_uninit_copy
5232 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5233{
5234public:
5235 exposure_through_uninit_copy (const region *src_region,
5236 const region *dest_region,
ffaeb9dc 5237 const svalue *copied_sval)
5238 : m_src_region (src_region),
5239 m_dest_region (dest_region),
ffaeb9dc 5240 m_copied_sval (copied_sval)
5241 {
5242 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5243 || m_copied_sval->get_kind () == SK_COMPOUND);
5244 }
5245
5246 const char *get_kind () const final override
5247 {
5248 return "exposure_through_uninit_copy";
5249 }
5250
5251 bool operator== (const exposure_through_uninit_copy &other) const
5252 {
5253 return (m_src_region == other.m_src_region
5254 && m_dest_region == other.m_dest_region
5255 && m_copied_sval == other.m_copied_sval);
5256 }
5257
5258 int get_controlling_option () const final override
5259 {
5260 return OPT_Wanalyzer_exposure_through_uninit_copy;
5261 }
5262
5263 bool emit (rich_location *rich_loc) final override
5264 {
5265 diagnostic_metadata m;
5266 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5267 m.add_cwe (200);
5268 enum memory_space mem_space = get_src_memory_space ();
5269 bool warned;
5270 switch (mem_space)
5271 {
5272 default:
5273 warned = warning_meta
5274 (rich_loc, m, get_controlling_option (),
5275 "potential exposure of sensitive information"
5276 " by copying uninitialized data across trust boundary");
5277 break;
5278 case MEMSPACE_STACK:
5279 warned = warning_meta
5280 (rich_loc, m, get_controlling_option (),
5281 "potential exposure of sensitive information"
5282 " by copying uninitialized data from stack across trust boundary");
5283 break;
5284 case MEMSPACE_HEAP:
5285 warned = warning_meta
5286 (rich_loc, m, get_controlling_option (),
5287 "potential exposure of sensitive information"
5288 " by copying uninitialized data from heap across trust boundary");
5289 break;
5290 }
5291 if (warned)
5292 {
5293 location_t loc = rich_loc->get_loc ();
5294 inform_number_of_uninit_bits (loc);
5295 complain_about_uninit_ranges (loc);
5296
5297 if (mem_space == MEMSPACE_STACK)
5298 maybe_emit_fixit_hint ();
5299 }
5300 return warned;
5301 }
5302
5303 label_text describe_final_event (const evdesc::final_event &) final override
5304 {
5305 enum memory_space mem_space = get_src_memory_space ();
5306 switch (mem_space)
5307 {
5308 default:
5309 return label_text::borrow ("uninitialized data copied here");
5310
5311 case MEMSPACE_STACK:
5312 return label_text::borrow ("uninitialized data copied from stack here");
5313
5314 case MEMSPACE_HEAP:
5315 return label_text::borrow ("uninitialized data copied from heap here");
5316 }
5317 }
5318
5319 void mark_interesting_stuff (interesting_t *interest) final override
5320 {
5321 if (m_src_region)
5322 interest->add_region_creation (m_src_region);
5323 }
5324
5325private:
5326 enum memory_space get_src_memory_space () const
5327 {
5328 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5329 }
5330
5331 bit_size_t calc_num_uninit_bits () const
5332 {
5333 switch (m_copied_sval->get_kind ())
5334 {
5335 default:
5336 gcc_unreachable ();
5337 break;
5338 case SK_POISONED:
5339 {
5340 const poisoned_svalue *poisoned_sval
5341 = as_a <const poisoned_svalue *> (m_copied_sval);
5342 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5343
5344 /* Give up if we don't have type information. */
5345 if (m_copied_sval->get_type () == NULL_TREE)
5346 return 0;
5347
5348 bit_size_t size_in_bits;
5349 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5350 return size_in_bits;
5351
5352 /* Give up if we can't get the size of the type. */
5353 return 0;
5354 }
5355 break;
5356 case SK_COMPOUND:
5357 {
5358 const compound_svalue *compound_sval
5359 = as_a <const compound_svalue *> (m_copied_sval);
5360 bit_size_t result = 0;
5361 /* Find keys for uninit svals. */
5362 for (auto iter : *compound_sval)
5363 {
5364 const svalue *sval = iter.second;
5365 if (const poisoned_svalue *psval
5366 = sval->dyn_cast_poisoned_svalue ())
5367 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5368 {
5369 const binding_key *key = iter.first;
5370 const concrete_binding *ckey
5371 = key->dyn_cast_concrete_binding ();
5372 gcc_assert (ckey);
5373 result += ckey->get_size_in_bits ();
5374 }
5375 }
5376 return result;
5377 }
5378 }
5379 }
5380
5381 void inform_number_of_uninit_bits (location_t loc) const
5382 {
5383 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5384 if (num_uninit_bits <= 0)
5385 return;
5386 if (num_uninit_bits % BITS_PER_UNIT == 0)
5387 {
5388 /* Express in bytes. */
5389 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5390 if (num_uninit_bytes == 1)
5391 inform (loc, "1 byte is uninitialized");
5392 else
5393 inform (loc,
5394 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5395 }
5396 else
5397 {
5398 /* Express in bits. */
5399 if (num_uninit_bits == 1)
5400 inform (loc, "1 bit is uninitialized");
5401 else
5402 inform (loc,
5403 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5404 }
5405 }
5406
5407 void complain_about_uninit_ranges (location_t loc) const
5408 {
5409 if (const compound_svalue *compound_sval
5410 = m_copied_sval->dyn_cast_compound_svalue ())
5411 {
5412 /* Find keys for uninit svals. */
5413 auto_vec<const concrete_binding *> uninit_keys;
5414 for (auto iter : *compound_sval)
5415 {
5416 const svalue *sval = iter.second;
5417 if (const poisoned_svalue *psval
5418 = sval->dyn_cast_poisoned_svalue ())
5419 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5420 {
5421 const binding_key *key = iter.first;
5422 const concrete_binding *ckey
5423 = key->dyn_cast_concrete_binding ();
5424 gcc_assert (ckey);
5425 uninit_keys.safe_push (ckey);
5426 }
5427 }
5428 /* Complain about them in sorted order. */
5429 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
5430
5431 std::unique_ptr<record_layout> layout;
5432
5433 tree type = m_copied_sval->get_type ();
5434 if (type && TREE_CODE (type) == RECORD_TYPE)
5435 {
5436 // (std::make_unique is C++14)
5437 layout = std::unique_ptr<record_layout> (new record_layout (type));
5438
5439 if (0)
5440 layout->dump ();
5441 }
5442
5443 unsigned i;
5444 const concrete_binding *ckey;
5445 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
5446 {
5447 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5448 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5449 complain_about_uninit_range (loc, start_bit, next_bit,
5450 layout.get ());
5451 }
5452 }
5453 }
5454
5455 void complain_about_uninit_range (location_t loc,
5456 bit_offset_t start_bit,
5457 bit_offset_t next_bit,
5458 const record_layout *layout) const
5459 {
5460 if (layout)
5461 {
5462 while (start_bit < next_bit)
5463 {
5464 if (const record_layout::item *item
5465 = layout->get_item_at (start_bit))
5466 {
5467 gcc_assert (start_bit >= item->get_start_bit_offset ());
5468 gcc_assert (start_bit < item->get_next_bit_offset ());
5469 if (item->get_start_bit_offset () == start_bit
5470 && item->get_next_bit_offset () <= next_bit)
5471 complain_about_fully_uninit_item (*item);
5472 else
5473 complain_about_partially_uninit_item (*item);
5474 start_bit = item->get_next_bit_offset ();
5475 continue;
5476 }
5477 else
5478 break;
5479 }
5480 }
5481
5482 if (start_bit >= next_bit)
5483 return;
5484
5485 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5486 {
5487 /* Express in bytes. */
5488 byte_offset_t start_byte = start_bit / 8;
5489 byte_offset_t last_byte = (next_bit / 8) - 1;
5490 if (last_byte == start_byte)
5491 inform (loc,
5492 "byte %wu is uninitialized",
5493 start_byte.to_uhwi ());
5494 else
5495 inform (loc,
5496 "bytes %wu - %wu are uninitialized",
5497 start_byte.to_uhwi (),
5498 last_byte.to_uhwi ());
5499 }
5500 else
5501 {
5502 /* Express in bits. */
5503 bit_offset_t last_bit = next_bit - 1;
5504 if (last_bit == start_bit)
5505 inform (loc,
5506 "bit %wu is uninitialized",
5507 start_bit.to_uhwi ());
5508 else
5509 inform (loc,
5510 "bits %wu - %wu are uninitialized",
5511 start_bit.to_uhwi (),
5512 last_bit.to_uhwi ());
5513 }
5514 }
5515
5516 static void
5517 complain_about_fully_uninit_item (const record_layout::item &item)
5518 {
5519 tree field = item.m_field;
5520 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5521 if (item.m_is_padding)
5522 {
5523 if (num_bits % 8 == 0)
5524 {
5525 /* Express in bytes. */
5526 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5527 if (num_bytes == 1)
5528 inform (DECL_SOURCE_LOCATION (field),
5529 "padding after field %qD is uninitialized (1 byte)",
5530 field);
5531 else
5532 inform (DECL_SOURCE_LOCATION (field),
5533 "padding after field %qD is uninitialized (%wu bytes)",
5534 field, num_bytes.to_uhwi ());
5535 }
5536 else
5537 {
5538 /* Express in bits. */
5539 if (num_bits == 1)
5540 inform (DECL_SOURCE_LOCATION (field),
5541 "padding after field %qD is uninitialized (1 bit)",
5542 field);
5543 else
5544 inform (DECL_SOURCE_LOCATION (field),
5545 "padding after field %qD is uninitialized (%wu bits)",
5546 field, num_bits.to_uhwi ());
5547 }
5548 }
5549 else
5550 {
5551 if (num_bits % 8 == 0)
5552 {
5553 /* Express in bytes. */
5554 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5555 if (num_bytes == 1)
5556 inform (DECL_SOURCE_LOCATION (field),
5557 "field %qD is uninitialized (1 byte)", field);
5558 else
5559 inform (DECL_SOURCE_LOCATION (field),
5560 "field %qD is uninitialized (%wu bytes)",
5561 field, num_bytes.to_uhwi ());
5562 }
5563 else
5564 {
5565 /* Express in bits. */
5566 if (num_bits == 1)
5567 inform (DECL_SOURCE_LOCATION (field),
5568 "field %qD is uninitialized (1 bit)", field);
5569 else
5570 inform (DECL_SOURCE_LOCATION (field),
5571 "field %qD is uninitialized (%wu bits)",
5572 field, num_bits.to_uhwi ());
5573 }
5574 }
5575 }
5576
5577 static void
5578 complain_about_partially_uninit_item (const record_layout::item &item)
5579 {
5580 tree field = item.m_field;
5581 if (item.m_is_padding)
5582 inform (DECL_SOURCE_LOCATION (field),
5583 "padding after field %qD is partially uninitialized",
5584 field);
5585 else
5586 inform (DECL_SOURCE_LOCATION (field),
5587 "field %qD is partially uninitialized",
5588 field);
5589 /* TODO: ideally we'd describe what parts are uninitialized. */
5590 }
5591
5592 void maybe_emit_fixit_hint () const
5593 {
5594 if (tree decl = m_src_region->maybe_get_decl ())
5595 {
5596 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
5597 hint_richloc.add_fixit_insert_after (" = {0}");
5598 inform (&hint_richloc,
5599 "suggest forcing zero-initialization by"
5600 " providing a %<{0}%> initializer");
5601 }
5602 }
5603
5604private:
5605 const region *m_src_region;
5606 const region *m_dest_region;
5607 const svalue *m_copied_sval;
5608};
5609
5610/* Return true if any part of SVAL is uninitialized. */
5611
5612static bool
5613contains_uninit_p (const svalue *sval)
5614{
5615 struct uninit_finder : public visitor
5616 {
5617 public:
5618 uninit_finder () : m_found_uninit (false) {}
5619 void visit_poisoned_svalue (const poisoned_svalue *sval)
5620 {
5621 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5622 m_found_uninit = true;
5623 }
5624 bool m_found_uninit;
5625 };
5626
5627 uninit_finder v;
5628 sval->accept (&v);
5629
5630 return v.m_found_uninit;
5631}
5632
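/* For example (a sketch): after simulating

     struct s { int a; int b; } v;
     v.a = 1;

   the compound_svalue for "v" binds "a" to a constant but leaves "b" as
   a POISON_KIND_UNINIT poisoned_svalue, so contains_uninit_p returns
   true for it.  */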
5633/* Function for use by plugins when simulating writing data through a
5634 pointer to an "untrusted" region DST_REG (and thus crossing a security
5635 boundary), such as copying data to user space in an OS kernel.
5636
5637 Check that COPIED_SVAL is fully initialized. If not, complain about
5638 an infoleak to CTXT.
5639
5640 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5641 as to where COPIED_SVAL came from. */
5642
5643void
5644region_model::maybe_complain_about_infoleak (const region *dst_reg,
5645 const svalue *copied_sval,
5646 const region *src_reg,
5647 region_model_context *ctxt)
5648{
5649 /* Check for exposure. */
5650 if (contains_uninit_p (copied_sval))
5651 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5652 dst_reg,
5653 copied_sval));
5654}
5655
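/* For instance (a sketch of plugin usage; "dst_reg", "copied_sval" and
   "src_reg" are assumed to have been obtained from the simulated call):

     model->maybe_complain_about_infoleak (dst_reg, copied_sval,
                                           src_reg, ctxt);

   so that e.g. a copy_to_user of a struct with uninitialized padding is
   reported via the CWE-200 diagnostic above.  */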
5656/* Set errno to a positive symbolic int, as if some error has occurred. */
5657
5658void
5659region_model::set_errno (const call_details &cd)
5660{
5661 const region *errno_reg = m_mgr->get_errno_region ();
5662 conjured_purge p (this, cd.get_ctxt ());
5663 const svalue *new_errno_sval
5664 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
5665 cd.get_call_stmt (),
5666 errno_reg, p);
5667 const svalue *zero
5668 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
5669 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
5670 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
5671}
5672
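/* For example (a sketch): a handler simulating a failing call can do:

     model->set_errno (cd);

   after which the errno region holds a fresh conjured value known only
   to satisfy errno > 0, keeping guards such as "if (errno == ENOMEM)"
   feasible without committing to a specific error code.  */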
5673/* class noop_region_model_context : public region_model_context. */
5674
c65d3c7f 5675void
6341f14e 5676noop_region_model_context::add_note (std::unique_ptr<pending_note>)
c65d3c7f 5677{
5678}
5679
eafa9d96 5680void
accece8c 5681noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
eafa9d96 5682{
5683}
5684
5685void
5686noop_region_model_context::terminate_path ()
5687{
5688}
5689
808f4dfe 5690/* struct model_merger. */
757bf1df 5691
808f4dfe 5692/* Dump a multiline representation of this merger to PP. */
5693
5694void
808f4dfe 5695model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 5696{
5697 pp_string (pp, "model A:");
5698 pp_newline (pp);
5699 m_model_a->dump_to_pp (pp, simple, true);
5700 pp_newline (pp);
757bf1df 5701
808f4dfe 5702 pp_string (pp, "model B:");
757bf1df 5703 pp_newline (pp);
808f4dfe 5704 m_model_b->dump_to_pp (pp, simple, true);
5705 pp_newline (pp);
5706
808f4dfe 5707 pp_string (pp, "merged model:");
757bf1df 5708 pp_newline (pp);
808f4dfe 5709 m_merged_model->dump_to_pp (pp, simple, true);
5710 pp_newline (pp);
5711}
5712
808f4dfe 5713/* Dump a multiline representation of this merger to FILE. */
5714
5715void
808f4dfe 5716model_merger::dump (FILE *fp, bool simple) const
5717{
5718 pretty_printer pp;
5719 pp_format_decoder (&pp) = default_tree_printer;
5720 pp_show_color (&pp) = pp_show_color (global_dc->printer);
5721 pp.buffer->stream = fp;
808f4dfe 5722 dump_to_pp (&pp, simple);
5723 pp_flush (&pp);
5724}
5725
808f4dfe 5726/* Dump a multiline representation of this merger to stderr. */
5727
5728DEBUG_FUNCTION void
808f4dfe 5729model_merger::dump (bool simple) const
757bf1df 5730{
808f4dfe 5731 dump (stderr, simple);
5732}
5733
5734/* Return true if it's OK to merge SVAL with other svalues. */
5735
5736bool
5737model_merger::mergeable_svalue_p (const svalue *sval) const
5738{
5739 if (m_ext_state)
5740 {
5741 /* Reject merging svalues that have non-purgeable sm-state,
5742 to avoid falsely reporting memory leaks by merging them
5743 with something else. For example, given a local var "p",
5744 reject the merger of a:
5745 store_a mapping "p" to a malloc-ed ptr
5746 with:
5747 store_b mapping "p" to a NULL ptr. */
5748 if (m_state_a)
5749 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5750 return false;
5751 if (m_state_b)
5752 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5753 return false;
5754 }
5755 return true;
5756}
5757
5758} // namespace ana
5759
808f4dfe 5760/* Dump RMODEL fully to stderr (i.e. without summarization). */
757bf1df 5761
5762DEBUG_FUNCTION void
5763debug (const region_model &rmodel)
757bf1df 5764{
808f4dfe 5765 rmodel.dump (false);
5766}
5767
8ca7fa84 5768/* class rejected_op_constraint : public rejected_constraint. */
5769
5770void
8ca7fa84 5771rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
5772{
5773 region_model m (m_model);
5774 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5775 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5776 lhs_sval->dump_to_pp (pp, true);
5777 pp_printf (pp, " %s ", op_symbol_code (m_op));
5778 rhs_sval->dump_to_pp (pp, true);
5779}
5780
5781/* class rejected_default_case : public rejected_constraint. */
5782
5783void
5784rejected_default_case::dump_to_pp (pretty_printer *pp) const
5785{
5786 pp_string (pp, "implicit default for enum");
5787}
5788
5789/* class rejected_ranges_constraint : public rejected_constraint. */
5790
5791void
5792rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5793{
5794 region_model m (m_model);
5795 const svalue *sval = m.get_rvalue (m_expr, NULL);
5796 sval->dump_to_pp (pp, true);
5797 pp_string (pp, " in ");
5798 m_ranges->dump_to_pp (pp, true);
5799}
5800
808f4dfe 5801/* class engine. */
757bf1df 5802
5803/* engine's ctor. */
5804
5805engine::engine (const supergraph *sg, logger *logger)
5806: m_sg (sg), m_mgr (logger)
5807{
5808}
5809
808f4dfe 5810/* Dump the managed objects by class to LOGGER, and the per-class totals. */
757bf1df 5811
5812void
5813engine::log_stats (logger *logger) const
757bf1df 5814{
808f4dfe 5815 m_mgr.log_stats (logger, true);
5816}
5817
5818namespace ana {
5819
5820#if CHECKING_P
5821
5822namespace selftest {
5823
5824/* Build a constant tree of the given type from STR. */
5825
5826static tree
5827build_real_cst_from_string (tree type, const char *str)
5828{
5829 REAL_VALUE_TYPE real;
5830 real_from_string (&real, str);
5831 return build_real (type, real);
5832}
5833
5834/* Append various "interesting" constants to OUT (e.g. NaN). */
5835
5836static void
5837append_interesting_constants (auto_vec<tree> *out)
5838{
5839 out->safe_push (build_int_cst (integer_type_node, 0));
5840 out->safe_push (build_int_cst (integer_type_node, 42));
5841 out->safe_push (build_int_cst (unsigned_type_node, 0));
5842 out->safe_push (build_int_cst (unsigned_type_node, 42));
5843 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5844 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5845 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5846 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5847 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5848 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5849 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5850 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5851}
5852
5853/* Verify that tree_cmp is a well-behaved comparator for qsort, even
5854 if the underlying constants aren't comparable. */
5855
5856static void
5857test_tree_cmp_on_constants ()
5858{
5859 auto_vec<tree> csts;
5860 append_interesting_constants (&csts);
5861
5862 /* Try sorting every triple. */
5863 const unsigned num = csts.length ();
5864 for (unsigned i = 0; i < num; i++)
5865 for (unsigned j = 0; j < num; j++)
5866 for (unsigned k = 0; k < num; k++)
5867 {
5868 auto_vec<tree> v (3);
5869 v.quick_push (csts[i]);
5870 v.quick_push (csts[j]);
5871 v.quick_push (csts[k]);
5872 v.qsort (tree_cmp);
5873 }
5874}
5875
5876/* Implementation detail of the ASSERT_CONDITION_* macros. */
5877
5878void
5879assert_condition (const location &loc,
5880 region_model &model,
5881 const svalue *lhs, tree_code op, const svalue *rhs,
5882 tristate expected)
5883{
5884 tristate actual = model.eval_condition (lhs, op, rhs);
5885 ASSERT_EQ_AT (loc, actual, expected);
5886}
5887
5888/* Implementation detail of the ASSERT_CONDITION_* macros. */
5889
5890void
5891assert_condition (const location &loc,
5892 region_model &model,
5893 tree lhs, tree_code op, tree rhs,
5894 tristate expected)
5895{
5896 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
5897 ASSERT_EQ_AT (loc, actual, expected);
5898}
5899
5900/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
5901
5902static void
5903assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5904{
5905 auto_fix_quotes sentinel;
5906 pretty_printer pp;
5907 pp_format_decoder (&pp) = default_tree_printer;
5908 dump_tree (&pp, t);
5909 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5910}
5911
5912/* Assert that dump_tree (T) is EXPECTED. */
5913
5914#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
5915 SELFTEST_BEGIN_STMT \
5916 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
5917 SELFTEST_END_STMT
5918
5919/* Implementation detail of ASSERT_DUMP_EQ. */
5920
5921static void
5922assert_dump_eq (const location &loc,
5923 const region_model &model,
5924 bool summarize,
5925 const char *expected)
5926{
5927 auto_fix_quotes sentinel;
5928 pretty_printer pp;
5929 pp_format_decoder (&pp) = default_tree_printer;
5930
5931 model.dump_to_pp (&pp, summarize, true);
5932 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5933}
5934
5935/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
5936
5937#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
5938 SELFTEST_BEGIN_STMT \
5939 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
5940 SELFTEST_END_STMT
5941
5942/* Smoketest for region_model::dump_to_pp. */
5943
5944static void
5945test_dump ()
5946{
5947 region_model_manager mgr;
5948 region_model model (&mgr);
5949
5950 ASSERT_DUMP_EQ (model, false,
5951 "stack depth: 0\n"
5952 "m_called_unknown_fn: FALSE\n"
5953 "constraint_manager:\n"
5954 " equiv classes:\n"
5955 " constraints:\n");
5956 ASSERT_DUMP_EQ (model, true,
5957 "stack depth: 0\n"
5958 "m_called_unknown_fn: FALSE\n"
5959 "constraint_manager:\n"
5960 " equiv classes:\n"
5961 " constraints:\n");
5962}
5963
5964/* Helper function for selftests. Create a struct or union type named NAME,
5965 with the fields given by the FIELD_DECLS in FIELDS.
5966 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
5967 create a UNION_TYPE. */
5968
5969static tree
5970make_test_compound_type (const char *name, bool is_struct,
5971 const auto_vec<tree> *fields)
5972{
5973 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
5974 TYPE_NAME (t) = get_identifier (name);
5975 TYPE_SIZE (t) = 0;
5976
5977 tree fieldlist = NULL;
5978 int i;
5979 tree field;
5980 FOR_EACH_VEC_ELT (*fields, i, field)
5981 {
5982 gcc_assert (TREE_CODE (field) == FIELD_DECL);
5983 DECL_CONTEXT (field) = t;
5984 fieldlist = chainon (field, fieldlist);
5985 }
5986 fieldlist = nreverse (fieldlist);
5987 TYPE_FIELDS (t) = fieldlist;
5988
5989 layout_type (t);
5990 return t;
5991}
5992
5993/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
5994
5995struct coord_test
5996{
5997 coord_test ()
5998 {
5999 auto_vec<tree> fields;
6000 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6001 get_identifier ("x"), integer_type_node);
6002 fields.safe_push (m_x_field);
6003 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6004 get_identifier ("y"), integer_type_node);
6005 fields.safe_push (m_y_field);
6006 m_coord_type = make_test_compound_type ("coord", true, &fields);
6007 }
6008
6009 tree m_x_field;
6010 tree m_y_field;
6011 tree m_coord_type;
6012};
6013
808f4dfe 6014/* Verify usage of a struct. */
6015
6016static void
808f4dfe 6017test_struct ()
884d9141 6018{
6019 coord_test ct;
6020
6021 tree c = build_global_decl ("c", ct.m_coord_type);
6022 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6023 c, ct.m_x_field, NULL_TREE);
6024 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6025 c, ct.m_y_field, NULL_TREE);
6026
6027 tree int_17 = build_int_cst (integer_type_node, 17);
6028 tree int_m3 = build_int_cst (integer_type_node, -3);
6029
6030 region_model_manager mgr;
6031 region_model model (&mgr);
6032 model.set_value (c_x, int_17, NULL);
6033 model.set_value (c_y, int_m3, NULL);
6034
6035 /* Verify get_offset for "c.x". */
6036 {
6037 const region *c_x_reg = model.get_lvalue (c_x, NULL);
7a6564c9 6038 region_offset offset = c_x_reg->get_offset (&mgr);
6039 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6040 ASSERT_EQ (offset.get_bit_offset (), 0);
6041 }
6042
6043 /* Verify get_offset for "c.y". */
6044 {
6045 const region *c_y_reg = model.get_lvalue (c_y, NULL);
7a6564c9 6046 region_offset offset = c_y_reg->get_offset (&mgr);
6047 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6048 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6049 }
6050}
6051
808f4dfe 6052/* Verify usage of an array element. */
884d9141
DM
6053
6054static void
808f4dfe 6055test_array_1 ()
884d9141
DM
6056{
6057 tree tlen = size_int (10);
6058 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6059
6060 tree a = build_global_decl ("a", arr_type);
6061
6062 region_model_manager mgr;
6063 region_model model (&mgr);
6064 tree int_0 = build_int_cst (integer_type_node, 0);
6065 tree a_0 = build4 (ARRAY_REF, char_type_node,
6066 a, int_0, NULL_TREE, NULL_TREE);
6067 tree char_A = build_int_cst (char_type_node, 'A');
6068 model.set_value (a_0, char_A, NULL);
6069}
6070
6071/* Verify that region_model::get_representative_tree works as expected. */
6072
6073static void
6074test_get_representative_tree ()
6075{
6076 region_model_manager mgr;
6077
6078 /* STRING_CST. */
6079 {
6080 tree string_cst = build_string (4, "foo");
6081 region_model m (&mgr);
6082 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6083 tree rep = m.get_representative_tree (str_sval);
6084 ASSERT_EQ (rep, string_cst);
6085 }
6086
6087 /* String literal. */
6088 {
6089 tree string_cst_ptr = build_string_literal (4, "foo");
6090 region_model m (&mgr);
6091 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6092 tree rep = m.get_representative_tree (str_sval);
6093 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6094 }
6095
6096 /* Value of an element within an array. */
6097 {
6098 tree tlen = size_int (10);
6099 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6100 tree a = build_global_decl ("a", arr_type);
6101 placeholder_svalue test_sval (char_type_node, "test value");
6102
6103 /* Value of a[3]. */
6104 {
6105 test_region_model_context ctxt;
6106 region_model model (&mgr);
6107 tree int_3 = build_int_cst (integer_type_node, 3);
6108 tree a_3 = build4 (ARRAY_REF, char_type_node,
6109 a, int_3, NULL_TREE, NULL_TREE);
6110 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6111 model.set_value (a_3_reg, &test_sval, &ctxt);
6112 tree rep = model.get_representative_tree (&test_sval);
6113 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6114 }
6115
6116 /* Value of a[0]. */
6117 {
6118 test_region_model_context ctxt;
6119 region_model model (&mgr);
6120 tree idx = build_int_cst (integer_type_node, 0);
6121 tree a_0 = build4 (ARRAY_REF, char_type_node,
6122 a, idx, NULL_TREE, NULL_TREE);
6123 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6124 model.set_value (a_0_reg, &test_sval, &ctxt);
6125 tree rep = model.get_representative_tree (&test_sval);
6126 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6127 }
6128 }
6129
6130 /* Value of a field within a struct. */
6131 {
6132 coord_test ct;
6133
6134 tree c = build_global_decl ("c", ct.m_coord_type);
6135 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6136 c, ct.m_x_field, NULL_TREE);
6137 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6138 c, ct.m_y_field, NULL_TREE);
6139
6140 test_region_model_context ctxt;
6141
6142 /* Value of initial field. */
6143 {
6144 region_model m (&mgr);
6145 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6146 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6147 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6148 tree rep = m.get_representative_tree (&test_sval_x);
6149 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6150 }
6151
6152 /* Value of non-initial field. */
6153 {
6154 region_model m (&mgr);
6155 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6156 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6157 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6158 tree rep = m.get_representative_tree (&test_sval_y);
6159 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6160 }
6161 }
6162}
6163
757bf1df 6164/* Verify that calling region_model::get_rvalue repeatedly on the same
808f4dfe 6165 tree constant retrieves the same svalue *. */
6166
6167static void
6168test_unique_constants ()
6169{
6170 tree int_0 = build_int_cst (integer_type_node, 0);
6171 tree int_42 = build_int_cst (integer_type_node, 42);
6172
6173 test_region_model_context ctxt;
6174 region_model_manager mgr;
6175 region_model model (&mgr);
6176 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6177 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6178 model.get_rvalue (int_42, &ctxt));
6179 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6180 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
757bf1df 6181
6182 /* A "(const int)42" will be a different tree from "(int)42"... */
6183 tree const_int_type_node
6184 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6185 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6186 ASSERT_NE (int_42, const_int_42);
6187 /* It should have a different const_svalue. */
6188 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6189 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6190 ASSERT_NE (int_42_sval, const_int_42_sval);
6191 /* But they should compare as equal. */
6192 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6193 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
6194}
6195
6196/* Verify that each type gets its own singleton unknown_svalue within a
6197 region_model_manager, and that NULL_TREE gets its own singleton. */
6198
6199static void
808f4dfe 6200test_unique_unknowns ()
757bf1df 6201{
6202 region_model_manager mgr;
6203 const svalue *unknown_int
6204 = mgr.get_or_create_unknown_svalue (integer_type_node);
6205 /* Repeated calls with the same type should get the same "unknown"
6206 svalue. */
6207 const svalue *unknown_int_2
6208 = mgr.get_or_create_unknown_svalue (integer_type_node);
6209 ASSERT_EQ (unknown_int, unknown_int_2);
757bf1df 6210
6211 /* Different types (or the NULL type) should have different
6212 unknown_svalues. */
6213 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6214 ASSERT_NE (unknown_NULL_type, unknown_int);
757bf1df 6215
6216 /* Repeated calls with NULL for the type should get the same "unknown"
6217 svalue. */
6218 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6219 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
6220}
6221
808f4dfe 6222/* Verify that initial_svalue instances are handled as expected. */
757bf1df 6223
6224static void
6225test_initial_svalue_folding ()
757bf1df 6226{
6227 region_model_manager mgr;
6228 tree x = build_global_decl ("x", integer_type_node);
6229 tree y = build_global_decl ("y", integer_type_node);
757bf1df 6230
6231 test_region_model_context ctxt;
6232 region_model model (&mgr);
6233 const svalue *x_init = model.get_rvalue (x, &ctxt);
6234 const svalue *y_init = model.get_rvalue (y, &ctxt);
6235 ASSERT_NE (x_init, y_init);
6236 const region *x_reg = model.get_lvalue (x, &ctxt);
6237 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
757bf1df 6238
808f4dfe 6239}
757bf1df 6240
808f4dfe 6241/* Verify that unary ops are folded as expected. */
6242
6243static void
808f4dfe 6244test_unaryop_svalue_folding ()
757bf1df 6245{
808f4dfe 6246 region_model_manager mgr;
6247 tree x = build_global_decl ("x", integer_type_node);
6248 tree y = build_global_decl ("y", integer_type_node);
6249
6250 test_region_model_context ctxt;
6251 region_model model (&mgr);
6252 const svalue *x_init = model.get_rvalue (x, &ctxt);
6253 const svalue *y_init = model.get_rvalue (y, &ctxt);
6254 const region *x_reg = model.get_lvalue (x, &ctxt);
6255 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6256
6257 /* "(int)x" -> "x". */
6258 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6259
6260 /* "(void *)x" -> something other than "x". */
6261 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
6262
6263 /* "!(x == y)" -> "x != y". */
6264 ASSERT_EQ (mgr.get_or_create_unaryop
6265 (boolean_type_node, TRUTH_NOT_EXPR,
6266 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
6267 x_init, y_init)),
6268 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
6269 x_init, y_init));
6270 /* "!(x > y)" -> "x <= y". */
6271 ASSERT_EQ (mgr.get_or_create_unaryop
6272 (boolean_type_node, TRUTH_NOT_EXPR,
6273 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
6274 x_init, y_init)),
6275 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
6276 x_init, y_init));
6277}
6278
6279/* Verify that binops on constant svalues are folded. */
757bf1df 6280
6281static void
6282test_binop_svalue_folding ()
6283{
6284#define NUM_CSTS 10
6285 tree cst_int[NUM_CSTS];
6286 region_model_manager mgr;
6287 const svalue *cst_sval[NUM_CSTS];
6288 for (int i = 0; i < NUM_CSTS; i++)
6289 {
6290 cst_int[i] = build_int_cst (integer_type_node, i);
6291 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
6292 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
6293 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
6294 }
757bf1df 6295
6296 for (int i = 0; i < NUM_CSTS; i++)
6297 for (int j = 0; j < NUM_CSTS; j++)
6298 {
6299 if (i != j)
6300 ASSERT_NE (cst_sval[i], cst_sval[j]);
6301 if (i + j < NUM_CSTS)
6302 {
6303 const svalue *sum
6304 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6305 cst_sval[i], cst_sval[j]);
6306 ASSERT_EQ (sum, cst_sval[i + j]);
6307 }
6308 if (i - j >= 0)
6309 {
6310 const svalue *difference
6311 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
6312 cst_sval[i], cst_sval[j]);
6313 ASSERT_EQ (difference, cst_sval[i - j]);
6314 }
6315 if (i * j < NUM_CSTS)
6316 {
6317 const svalue *product
6318 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6319 cst_sval[i], cst_sval[j]);
6320 ASSERT_EQ (product, cst_sval[i * j]);
6321 }
6322 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
6323 cst_sval[i], cst_sval[j]);
6324 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
6325 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
6326 cst_sval[i], cst_sval[j]);
6327 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
6328 // etc
6329 }
757bf1df 6330
808f4dfe 6331 tree x = build_global_decl ("x", integer_type_node);
757bf1df 6332
6333 test_region_model_context ctxt;
6334 region_model model (&mgr);
6335 const svalue *x_init = model.get_rvalue (x, &ctxt);
6336
6337 /* PLUS_EXPR folding. */
6338 const svalue *x_init_plus_zero
6339 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6340 x_init, cst_sval[0]);
6341 ASSERT_EQ (x_init_plus_zero, x_init);
6342 const svalue *zero_plus_x_init
6343 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6344 cst_sval[0], x_init);
6345 ASSERT_EQ (zero_plus_x_init, x_init);
6346
6347 /* MULT_EXPR folding. */
6348 const svalue *x_init_times_zero
6349 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6350 x_init, cst_sval[0]);
6351 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
6352 const svalue *zero_times_x_init
6353 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6354 cst_sval[0], x_init);
6355 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
6356
6357 const svalue *x_init_times_one
6358 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6359 x_init, cst_sval[1]);
6360 ASSERT_EQ (x_init_times_one, x_init);
6361 const svalue *one_times_x_init
6362 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6363 cst_sval[1], x_init);
6364 ASSERT_EQ (one_times_x_init, x_init);
6365
6366 // etc
6367 // TODO: do we want to use the match-and-simplify DSL for this?
6368
6369 /* Verify that binops put any constants on the RHS. */
6370 const svalue *four_times_x_init
6371 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6372 cst_sval[4], x_init);
6373 const svalue *x_init_times_four
6374 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6375 x_init, cst_sval[4]);
6376 ASSERT_EQ (four_times_x_init, x_init_times_four);
6377 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
6378 ASSERT_EQ (binop->get_op (), MULT_EXPR);
6379 ASSERT_EQ (binop->get_arg0 (), x_init);
6380 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
6381
6382 /* Verify that ((x + 1) + 1) == (x + 2). */
6383 const svalue *x_init_plus_one
6384 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6385 x_init, cst_sval[1]);
6386 const svalue *x_init_plus_two
6387 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6388 x_init, cst_sval[2]);
6389 const svalue *x_init_plus_one_plus_one
6390 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6391 x_init_plus_one, cst_sval[1]);
6392 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
6393
6394 /* Verify various binops on booleans. */
6395 {
6396 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
6397 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
6398 const svalue *sval_unknown
6399 = mgr.get_or_create_unknown_svalue (boolean_type_node);
6400 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
6401 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
6402 {
6403 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6404 sval_true, sval_unknown),
6405 sval_true);
6406 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6407 sval_false, sval_unknown),
6408 sval_unknown);
6409 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6410 sval_false, &sval_placeholder),
6411 &sval_placeholder);
6412 }
6413 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
6414 {
6415 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6416 sval_false, sval_unknown),
6417 sval_false);
6418 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6419 sval_true, sval_unknown),
6420 sval_unknown);
6421 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6422 sval_true, &sval_placeholder),
6423 &sval_placeholder);
6424 }
6425 }
6426}
6427
6428/* Verify that sub_svalues are folded as expected. */
757bf1df 6429
6430static void
6431test_sub_svalue_folding ()
6432{
6433 coord_test ct;
6434 tree c = build_global_decl ("c", ct.m_coord_type);
6435 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6436 c, ct.m_x_field, NULL_TREE);
757bf1df 6437
6438 region_model_manager mgr;
6439 region_model model (&mgr);
6440 test_region_model_context ctxt;
6441 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 6442
6443 /* Verify that sub_svalue of "unknown" simply
6444 yields an unknown. */
757bf1df 6445
6446 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6447 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6448 unknown, c_x_reg);
6449 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6450 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
6451}
6452
6453/* Get BIT within VAL as a symbolic value within MGR. */
6454
6455static const svalue *
6456get_bit (region_model_manager *mgr,
6457 bit_offset_t bit,
6458 unsigned HOST_WIDE_INT val)
6459{
6460 const svalue *inner_svalue
6461 = mgr->get_or_create_int_cst (unsigned_type_node, val);
6462 return mgr->get_or_create_bits_within (boolean_type_node,
6463 bit_range (bit, 1),
6464 inner_svalue);
6465}
6466
6467/* Verify that bits_within_svalues are folded as expected. */
6468
6469static void
6470test_bits_within_svalue_folding ()
6471{
6472 region_model_manager mgr;
6473
6474 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
6475 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
6476
6477 {
6478 const unsigned val = 0x0000;
6479 for (unsigned bit = 0; bit < 16; bit++)
6480 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6481 }
6482
6483 {
6484 const unsigned val = 0x0001;
6485 ASSERT_EQ (get_bit (&mgr, 0, val), one);
6486 for (unsigned bit = 1; bit < 16; bit++)
6487 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6488 }
6489
6490 {
6491 const unsigned val = 0x8000;
6492 for (unsigned bit = 0; bit < 15; bit++)
6493 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6494 ASSERT_EQ (get_bit (&mgr, 15, val), one);
6495 }
6496
6497 {
6498 const unsigned val = 0xFFFF;
6499 for (unsigned bit = 0; bit < 16; bit++)
6500 ASSERT_EQ (get_bit (&mgr, bit, val), one);
6501 }
6502}
6503
808f4dfe 6504/* Test that region::descendent_of_p works as expected. */
6505
6506static void
808f4dfe 6507test_descendent_of_p ()
757bf1df 6508{
6509 region_model_manager mgr;
6510 const region *stack = mgr.get_stack_region ();
6511 const region *heap = mgr.get_heap_region ();
6512 const region *code = mgr.get_code_region ();
6513 const region *globals = mgr.get_globals_region ();
757bf1df 6514
6515 /* descendent_of_p should return true when used on the region itself. */
6516 ASSERT_TRUE (stack->descendent_of_p (stack));
6517 ASSERT_FALSE (stack->descendent_of_p (heap));
6518 ASSERT_FALSE (stack->descendent_of_p (code));
6519 ASSERT_FALSE (stack->descendent_of_p (globals));
757bf1df 6520
6521 tree x = build_global_decl ("x", integer_type_node);
6522 const region *x_reg = mgr.get_region_for_global (x);
6523 ASSERT_TRUE (x_reg->descendent_of_p (globals));
757bf1df 6524
6525 /* A cast_region should be a descendent of the original region. */
6526 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6527 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
6528}
6529
6530/* Verify that bit_range_region works as expected. */
6531
6532static void
6533test_bit_range_regions ()
6534{
6535 tree x = build_global_decl ("x", integer_type_node);
6536 region_model_manager mgr;
6537 const region *x_reg = mgr.get_region_for_global (x);
6538 const region *byte0
6539 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6540 const region *byte1
6541 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
6542 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6543 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6544 ASSERT_NE (byte0, byte1);
6545}
6546
6547/* Verify that simple assignments work as expected. */
6548
6549static void
6550test_assignment ()
6551{
6552 tree int_0 = build_int_cst (integer_type_node, 0);
6553 tree x = build_global_decl ("x", integer_type_node);
6554 tree y = build_global_decl ("y", integer_type_node);
6555
6556 /* "x == 0", then use of y, then "y = 0;". */
6557 region_model_manager mgr;
6558 region_model model (&mgr);
6559 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6560 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
6561 model.set_value (model.get_lvalue (y, NULL),
6562 model.get_rvalue (int_0, NULL),
6563 NULL);
6564 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6565 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
6566}
6567
6568/* Verify that compound assignments work as expected. */
6569
6570static void
6571test_compound_assignment ()
6572{
6573 coord_test ct;
6574
6575 tree c = build_global_decl ("c", ct.m_coord_type);
6576 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6577 c, ct.m_x_field, NULL_TREE);
6578 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6579 c, ct.m_y_field, NULL_TREE);
6580 tree d = build_global_decl ("d", ct.m_coord_type);
6581 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6582 d, ct.m_x_field, NULL_TREE);
6583 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6584 d, ct.m_y_field, NULL_TREE);
6585
6586 tree int_17 = build_int_cst (integer_type_node, 17);
6587 tree int_m3 = build_int_cst (integer_type_node, -3);
6588
6589 region_model_manager mgr;
6590 region_model model (&mgr);
6591 model.set_value (c_x, int_17, NULL);
6592 model.set_value (c_y, int_m3, NULL);
6593
a96f1c38 6594 /* Copy c to d. */
6595 const svalue *sval = model.get_rvalue (c, NULL);
6596 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6597
a96f1c38
DM
6598 /* Check that the fields have the same svalues. */
6599 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6600 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6601}
6602
757bf1df
DM
6603/* Verify the details of pushing and popping stack frames. */
6604
6605static void
6606test_stack_frames ()
6607{
6608 tree int_42 = build_int_cst (integer_type_node, 42);
6609 tree int_10 = build_int_cst (integer_type_node, 10);
6610 tree int_5 = build_int_cst (integer_type_node, 5);
6611 tree int_0 = build_int_cst (integer_type_node, 0);
6612
6613 auto_vec <tree> param_types;
6614 tree parent_fndecl = make_fndecl (integer_type_node,
6615 "parent_fn",
6616 param_types);
6617 allocate_struct_function (parent_fndecl, true);
6618
6619 tree child_fndecl = make_fndecl (integer_type_node,
6620 "child_fn",
6621 param_types);
6622 allocate_struct_function (child_fndecl, true);
6623
6624 /* "a" and "b" in the parent frame. */
6625 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6626 get_identifier ("a"),
6627 integer_type_node);
4cebae09 6628 DECL_CONTEXT (a) = parent_fndecl;
757bf1df
DM
6629 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6630 get_identifier ("b"),
6631 integer_type_node);
4cebae09 6632 DECL_CONTEXT (b) = parent_fndecl;
757bf1df
DM
6633 /* "x" and "y" in a child frame. */
6634 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6635 get_identifier ("x"),
6636 integer_type_node);
4cebae09 6637 DECL_CONTEXT (x) = child_fndecl;
757bf1df
DM
6638 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6639 get_identifier ("y"),
6640 integer_type_node);
4cebae09 6641 DECL_CONTEXT (y) = child_fndecl;
757bf1df
DM
6642
6643 /* "p" global. */
6644 tree p = build_global_decl ("p", ptr_type_node);
6645
6646 /* "q" global. */
6647 tree q = build_global_decl ("q", ptr_type_node);
6648
808f4dfe 6649 region_model_manager mgr;
757bf1df 6650 test_region_model_context ctxt;
808f4dfe 6651 region_model model (&mgr);
6652
6653 /* Push stack frame for "parent_fn". */
6654 const region *parent_frame_reg
6655 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6656 NULL, &ctxt);
6657 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6658 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6659 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6660 model.set_value (a_in_parent_reg,
6661 model.get_rvalue (int_42, &ctxt),
6662 &ctxt);
6663 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6664
6665 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6666 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6667 tristate (tristate::TS_TRUE));
6668
6669 /* Push stack frame for "child_fn". */
808f4dfe 6670 const region *child_frame_reg
757bf1df 6671 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
6672 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6673 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6674 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6675 model.set_value (x_in_child_reg,
6676 model.get_rvalue (int_0, &ctxt),
6677 &ctxt);
6678 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6679
6680 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6681 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6682 tristate (tristate::TS_TRUE));
6683
6684 /* Point a global pointer at a local in the child frame: p = &x. */
6685 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6686 model.set_value (p_in_globals_reg,
6687 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 6688 &ctxt);
808f4dfe 6689 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
6690
6691 /* Point another global pointer at p: q = &p. */
6692 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6693 model.set_value (q_in_globals_reg,
6694 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
6695 &ctxt);
6696
6697 /* Test region::descendent_of_p. */
6698 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6699 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6700 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
6701
6702 /* Pop the "child_fn" frame from the stack. */
6703 model.pop_frame (NULL, NULL, &ctxt);
6704 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6705 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6706
6707 /* Verify that p (which was pointing at the local "x" in the popped
6708 frame) has been poisoned. */
33255ad3 6709 const svalue *new_p_sval = model.get_rvalue (p, NULL);
6710 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6711 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6712 POISON_KIND_POPPED_STACK);
6713
6714 /* Verify that q still points to p, in spite of the region
6715 renumbering. */
808f4dfe 6716 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 6717 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 6718 ASSERT_EQ (new_q_sval->maybe_get_region (),
6719 model.get_lvalue (p, &ctxt));
6720
6721 /* Verify that top of stack has been updated. */
808f4dfe 6722 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6723
6724 /* Verify locals in parent frame. */
6725 /* Verify "a" still has its value. */
808f4dfe 6726 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
6727 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6728 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6729 int_42);
6730 /* Verify "b" still has its constraint. */
6731 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6732 tristate (tristate::TS_TRUE));
6733}
6734
/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack.  */

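/* A sketch of the kind of C source being modeled here (hypothetical;
   only the signature and the parm "n" matter to the test):

     int factorial (int n)
     {
       if (n <= 1)
	 return 1;
       return n * factorial (n - 1);
     }

   Each of the five frames pushed below is an activation of "factorial",
   and each activation has its own region for "n".  */
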
static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
    }
}

/* Ensure that region_model::operator== works as expected.  */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}

/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal.  */

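/* No explicit canonicalization call is needed in this case: the two
   orderings of the assignments should yield identical bindings.
   Contrast with test_canonicalization_3 below, where the models are
   canonicalized before being compared.  */
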
static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
		    model0.get_rvalue (int_42, NULL),
		    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
		    model0.get_rvalue (int_113, NULL),
		    NULL);

  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
		    model1.get_rvalue (int_113, NULL),
		    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
		    model1.get_rvalue (int_42, NULL),
		    NULL);

  ASSERT_EQ (model0, model1);
}

/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization.  */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}

/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}

/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergeable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.  */

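/* Typical usage (mirroring the calls in test_state_merging below):

     region_model merged (&mgr);
     const svalue *merged_x_sval;
     assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

   after which the caller inspects MERGED and MERGED_X_SVAL (here the
   merged svalue would be a "widening" value).  */
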
static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}

/* Verify that we can merge region_model instances.  */

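/* State merging is what happens when two paths through the code reach
   the same program point; e.g. for a hypothetical fragment like:

     if (flag)
       x = 42;
     else
       x = 113;

   the analyzer must combine the two models at the join point (here "x"
   would become a "widening" value; see the "two non-equal constant
   values" case below).  */
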
static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances.  */

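/* The merged constraints should be those implied by both input models
   (an intersection); e.g. merging

     model0: 0 <= (x == y) < n
     model1: z != 5 && (0 <= x < n)

   should keep (0 <= x < n) but drop both the "x == y" equality and the
   "z != 5" constraint, since each holds in only one of the inputs.  */
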
static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

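/* A widening svalue records a start value and a subsequent value for a
   loop variable; e.g. WIDENING (0, 1) below starts at 0 and is heading
   upwards.  Conditions that every such iterate satisfies (such as
   ">= 0") can be answered definitively, whereas conditions involving
   values at or above the start (such as "< 256") stay "unknown", since
   how far the value has advanced is not tracked.  */
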
static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
	 [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
       T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
       F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

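/* That is, given a hypothetical fragment such as:

     void *p = malloc (size);
     char *q = (char *)p;
     if (p != NULL)
       use (q);    <- "q" should also be known to be non-NULL here

   the constraint on "p" should carry over to "q", since both point at
   the same heap region ("use" and "size" are just placeholders).  */
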
static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

/* Smoketest of setting and getting values of array elements, with both
   constant and symbolic indices.  */

static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]".  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF.  */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
    *p;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */

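/* Simulating, roughly:

     int *p;
     int n;
     ...
     p = malloc (n * 4);

   and checking that the capacity recorded for the new heap region is
   the symbolic size "n * 4".  */
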
static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */

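/* As with test_malloc above, but here the allocation lives within a
   stack frame; the interesting extra behavior is that popping the
   frame should poison any pointers into the alloca-ed region.  */
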
static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */

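/* involves_p (OTHER) reports whether OTHER appears somewhere within
   this svalue's composition; e.g. below, INIT_VAL(*INIT_VAL(p))
   involves INIT_VAL(p), since the pointer's initial value is part of
   how the pointee's value is expressed, but not the other way
   around.  */
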
static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */