]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
analyzer: handle memmove like memcpy
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df 1/* Classes for modeling the state of memory.
7adcbafe 2 Copyright (C) 2019-2022 Free Software Foundation, Inc.
757bf1df
DM
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
c81b60b8 22#define INCLUDE_MEMORY
757bf1df
DM
23#include "system.h"
24#include "coretypes.h"
6341f14e 25#include "make-unique.h"
757bf1df
DM
26#include "tree.h"
27#include "function.h"
28#include "basic-block.h"
29#include "gimple.h"
30#include "gimple-iterator.h"
7892ff37 31#include "diagnostic-core.h"
757bf1df
DM
32#include "graphviz.h"
33#include "options.h"
34#include "cgraph.h"
35#include "tree-dfa.h"
36#include "stringpool.h"
37#include "convert.h"
38#include "target.h"
39#include "fold-const.h"
40#include "tree-pretty-print.h"
41#include "diagnostic-color.h"
42#include "diagnostic-metadata.h"
ef7827b0 43#include "bitmap.h"
757bf1df 44#include "selftest.h"
757bf1df
DM
45#include "analyzer/analyzer.h"
46#include "analyzer/analyzer-logging.h"
47#include "ordered-hash-map.h"
48#include "options.h"
49#include "cgraph.h"
50#include "cfg.h"
757bf1df
DM
51#include "analyzer/supergraph.h"
52#include "sbitmap.h"
808f4dfe
DM
53#include "analyzer/call-string.h"
54#include "analyzer/program-point.h"
55#include "analyzer/store.h"
757bf1df
DM
56#include "analyzer/region-model.h"
57#include "analyzer/constraint-manager.h"
58#include "diagnostic-event-id.h"
59#include "analyzer/sm.h"
60#include "diagnostic-event-id.h"
61#include "analyzer/sm.h"
62#include "analyzer/pending-diagnostic.h"
808f4dfe 63#include "analyzer/region-model-reachability.h"
757bf1df 64#include "analyzer/analyzer-selftests.h"
f573d351 65#include "analyzer/program-state.h"
bfca9505 66#include "analyzer/call-summary.h"
884d9141 67#include "stor-layout.h"
c7e276b8 68#include "attribs.h"
9a2c9579 69#include "tree-object-size.h"
1e2fe671
DM
70#include "gimple-ssa.h"
71#include "tree-phinodes.h"
72#include "tree-ssa-operands.h"
73#include "ssa-iterators.h"
5fbcbcaf 74#include "calls.h"
e6c3bb37 75#include "is-a.h"
c81b60b8 76#include "gcc-rich-location.h"
f5758fe5
DM
77#include "analyzer/checker-event.h"
78#include "analyzer/checker-path.h"
757bf1df
DM
79
80#if ENABLE_ANALYZER
81
75038aa6
DM
82namespace ana {
83
757bf1df
DM
84/* Dump T to PP in language-independent form, for debugging/logging/dumping
85 purposes. */
86
void
dump_tree (pretty_printer *pp, tree t)
{
  /* TDF_SLIM keeps the dump terse (no bodies/details of referenced decls).  */
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
92
808f4dfe
DM
93/* Dump T to PP in language-independent form in quotes, for
94 debugging/logging/dumping purposes. */
757bf1df
DM
95
void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  /* Honor the pretty-printer's color setting when emitting the quotes,
     so quoted output matches the rest of the diagnostic stream.  */
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}
103
808f4dfe
DM
104/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
105 calls within other pp_printf calls.
757bf1df 106
808f4dfe
DM
107 default_tree_printer handles 'T' and some other codes by calling
108 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
109 dump_generic_node calls pp_printf in various places, leading to
110 garbled output.
757bf1df 111
808f4dfe
DM
112 Ideally pp_printf could be made to be reentrant, but in the meantime
113 this function provides a workaround. */
6969ac30
DM
114
void
print_quoted_type (pretty_printer *pp, tree t)
{
  /* Deliberately call dump_generic_node directly rather than going through
     pp_printf ("%qT", t): dump_generic_node itself calls pp_printf, and
     pp_printf is not reentrant (see the comment above this function).  */
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
122
d726a57b
DM
123/* class region_to_value_map. */
124
125/* Assignment operator for region_to_value_map. */
126
127region_to_value_map &
128region_to_value_map::operator= (const region_to_value_map &other)
129{
130 m_hash_map.empty ();
131 for (auto iter : other.m_hash_map)
132 {
133 const region *reg = iter.first;
134 const svalue *sval = iter.second;
135 m_hash_map.put (reg, sval);
136 }
137 return *this;
138}
139
140/* Equality operator for region_to_value_map. */
141
142bool
143region_to_value_map::operator== (const region_to_value_map &other) const
144{
145 if (m_hash_map.elements () != other.m_hash_map.elements ())
146 return false;
147
148 for (auto iter : *this)
149 {
150 const region *reg = iter.first;
151 const svalue *sval = iter.second;
152 const svalue * const *other_slot = other.get (reg);
153 if (other_slot == NULL)
154 return false;
155 if (sval != *other_slot)
156 return false;
157 }
158
159 return true;
160}
161
162/* Dump this object to PP. */
163
void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  /* Sort the regions by pointer-comparison order so the dump is
     deterministic across runs.  */
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      /* Emit "REGION: SVALUE" for each binding.  */
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}
194
195/* Dump this object to stderr. */
196
DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  /* Use the tree printer so %E/%T style codes work in nested dumps.  */
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}
208
209
210/* Attempt to merge THIS with OTHER, writing the result
211 to OUT.
212
213 For now, write (region, value) mappings that are in common between THIS
ce917b04
DM
214 and OTHER to OUT, effectively taking the intersection.
215
216 Reject merger of different values. */
d726a57b
DM
217
218bool
219region_to_value_map::can_merge_with_p (const region_to_value_map &other,
220 region_to_value_map *out) const
221{
222 for (auto iter : *this)
223 {
224 const region *iter_reg = iter.first;
225 const svalue *iter_sval = iter.second;
226 const svalue * const * other_slot = other.get (iter_reg);
227 if (other_slot)
ce917b04
DM
228 {
229 if (iter_sval == *other_slot)
230 out->put (iter_reg, iter_sval);
231 else
232 return false;
233 }
d726a57b
DM
234 }
235 return true;
236}
237
33255ad3
DM
238/* Purge any state involving SVAL. */
239
240void
241region_to_value_map::purge_state_involving (const svalue *sval)
242{
243 auto_vec<const region *> to_purge;
244 for (auto iter : *this)
245 {
246 const region *iter_reg = iter.first;
247 const svalue *iter_sval = iter.second;
248 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
249 to_purge.safe_push (iter_reg);
250 }
251 for (auto iter : to_purge)
252 m_hash_map.remove (iter);
253}
254
757bf1df
DM
255/* class region_model. */
256
808f4dfe 257/* Ctor for region_model: construct an "empty" model. */
757bf1df 258
region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  /* The constraint manager is heap-allocated and owned by this model;
     released in the destructor.  */
  m_constraints = new constraint_manager (mgr);
}
265
266/* region_model's copy ctor. */
267
region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  /* Deep-copy the constraints; each model owns its own manager.  */
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}
275
276/* region_model's dtor. */
277
region_model::~region_model ()
{
  /* Release the owned constraint manager (allocated in the ctors).  */
  delete m_constraints;
}
282
283/* region_model's assignment operator. */
284
285region_model &
286region_model::operator= (const region_model &other)
287{
808f4dfe
DM
288 /* m_mgr is const. */
289 gcc_assert (m_mgr == other.m_mgr);
757bf1df 290
808f4dfe 291 m_store = other.m_store;
757bf1df
DM
292
293 delete m_constraints;
808f4dfe 294 m_constraints = new constraint_manager (*other.m_constraints);
757bf1df 295
808f4dfe 296 m_current_frame = other.m_current_frame;
757bf1df 297
9a2c9579
DM
298 m_dynamic_extents = other.m_dynamic_extents;
299
757bf1df
DM
300 return *this;
301}
302
303/* Equality operator for region_model.
304
808f4dfe
DM
305 Amongst other things this directly compares the stores and the constraint
306 managers, so for this to be meaningful both this and OTHER should
757bf1df
DM
307 have been canonicalized. */
308
bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  /* Equal models must hash equally; cheap consistency check in
     checking builds.  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
331
332/* Generate a hash value for this region_model. */
333
334hashval_t
808f4dfe
DM
335region_model::hash () const
336{
337 hashval_t result = m_store.hash ();
338 result ^= m_constraints->hash ();
339 return result;
757bf1df
DM
340}
341
808f4dfe
DM
342/* Dump a representation of this model to PP, showing the
343 stack, the store, and any constraints.
344 Use SIMPLE to control how svalues and regions are printed. */
757bf1df
DM
345
void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack, walking from the innermost frame outwards.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
757bf1df 396
808f4dfe 397/* Dump a representation of this model to FILE. */
757bf1df 398
808f4dfe
DM
void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  /* Use the tree printer so tree-printing codes work in nested dumps.  */
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}
410
808f4dfe 411/* Dump a multiline representation of this model to stderr. */
757bf1df 412
808f4dfe
DM
DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  /* Convenience overload: multiline dump to stderr.  */
  dump (stderr, simple, true);
}
757bf1df 418
808f4dfe 419/* Dump a multiline representation of this model to stderr. */
757bf1df 420
808f4dfe
DM
DEBUG_FUNCTION void
region_model::debug () const
{
  /* Entry point intended for use from the debugger: simple multiline
     dump to stderr.  */
  dump (true);
}
426
e61ffa20
DM
427/* Assert that this object is valid. */
428
void
region_model::validate () const
{
  /* Currently only the store carries checkable invariants.  */
  m_store.validate ();
}
434
808f4dfe
DM
435/* Canonicalize the store and constraints, to maximize the chance of
436 equality between region_model instances. */
757bf1df
DM
437
void
region_model::canonicalize ()
{
  /* Canonicalize both mutable components so that equivalent models
     compare equal via operator==.  */
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
444
445/* Return true if this region_model is in canonical form. */
446
447bool
448region_model::canonicalized_p () const
449{
450 region_model copy (*this);
808f4dfe 451 copy.canonicalize ();
757bf1df
DM
452 return *this == copy;
453}
454
808f4dfe
DM
455/* See the comment for store::loop_replay_fixup. */
456
void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  /* Delegate to the store; see store::loop_replay_fixup for details.  */
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
462
757bf1df
DM
463/* A subclass of pending_diagnostic for complaining about uses of
464 poisoned values. */
465
class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR: the tree for the poisoned value being used.
     PKIND: which flavor of poison (uninit/freed/popped stack).
     SRC_REGION: optionally, the region the poison came from (used for
     "region created here" notes); may be NULL.  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  /* Used by the diagnostic machinery to identify uninit warnings.  */
  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  /* Deduplication: two instances are the same diagnostic iff all three
     fields match.  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  /* Map each poison kind to the -Wanalyzer-* option controlling it.  */
  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  /* Emit the warning itself, with a CWE annotation where one applies.
     Returns true if a diagnostic was actually emitted.  */
  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  /* Wording for the final event in the diagnostic path.  */
  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  /* Flag the source region (if any) so the path includes its creation.  */
  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;			/* The poisoned value being used.  */
  enum poison_kind m_pkind;	/* Which kind of poison.  */
  const region *m_src_region;	/* Origin region, or NULL.  */
};
571
5e00ad3f
DM
572/* A subclass of pending_diagnostic for complaining about shifts
573 by negative counts. */
574
class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  /* ASSIGN: the shift statement; COUNT_CST: the negative count constant.  */
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  /* Deduplication: same stmt and same count constant.  */
  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
  }

private:
  const gassign *m_assign;	/* The offending shift stmt.  */
  tree m_count_cst;		/* The negative shift count.  */
};
614
615/* A subclass of pending_diagnostic for complaining about shifts
616 by counts >= the width of the operand type. */
617
class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  /* ASSIGN: the shift stmt; OPERAND_PRECISION: bit width of the shifted
     operand's type; COUNT_CST: the out-of-range count constant.  */
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  /* Deduplication: same stmt, precision and count.  */
  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;	/* The offending shift stmt.  */
  int m_operand_precision;	/* Precision of the shifted operand.  */
  tree m_count_cst;		/* The too-large shift count.  */
};
663
808f4dfe
DM
664/* If ASSIGN is a stmt that can be modelled via
665 set_value (lhs_reg, SVALUE, CTXT)
666 for some SVALUE, get the SVALUE.
667 Otherwise return NULL. */
757bf1df 668
808f4dfe
DM
const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Not a tree code we can model as a single svalue.  */
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	/* Comparisons.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	/* Shift-count checking only happens when we have a context to
	   report into, and only for constant counts.  */
	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
869
1e2fe671
DM
870/* Workaround for discarding certain false positives from
871 -Wanalyzer-use-of-uninitialized-value
872 of the form:
873 ((A OR-IF B) OR-IF C)
874 and:
875 ((A AND-IF B) AND-IF C)
876 where evaluating B is redundant, but could involve simple accesses of
877 uninitialized locals.
878
879 When optimization is turned on the FE can immediately fold compound
880 conditionals. Specifically, c_parser_condition parses this condition:
881 ((A OR-IF B) OR-IF C)
882 and calls c_fully_fold on the condition.
883 Within c_fully_fold, fold_truth_andor is called, which bails when
884 optimization is off, but if any optimization is turned on can convert the
885 ((A OR-IF B) OR-IF C)
886 into:
887 ((A OR B) OR_IF C)
888 for sufficiently simple B
889 i.e. the inner OR-IF becomes an OR.
890 At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
891 giving this for the inner condition:
892 tmp = A | B;
893 if (tmp)
894 thus effectively synthesizing a redundant access of B when optimization
895 is turned on, when compared to:
896 if (A) goto L1; else goto L4;
897 L1: if (B) goto L2; else goto L4;
898 L2: if (C) goto L3; else goto L4;
899 for the unoptimized case.
900
901 Return true if CTXT appears to be handling such a short-circuitable stmt,
902 such as the def-stmt for B for the:
903 tmp = A | B;
904 case above, for the case where A is true and thus B would have been
905 short-circuited without optimization, using MODEL for the value of A. */
906
static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  /* An SSA name with no underlying decl is a compiler temporary.  */
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
963
b33dd787
DM
964/* Workaround for discarding certain false positives from
965 -Wanalyzer-use-of-uninitialized-value
966 seen with -ftrivial-auto-var-init=.
967
968 -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
969
970 If the address of the var is taken, gimplification will give us
971 something like:
972
973 _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
974 len = _1;
975
976 The result of DEFERRED_INIT will be an uninit value; we don't
977 want to emit a false positive for "len = _1;"
978
979 Return true if ASSIGN_STMT is such a stmt. */
980
981static bool
982due_to_ifn_deferred_init_p (const gassign *assign_stmt)
983
984{
985 /* We must have an assignment to a decl from an SSA name that's the
986 result of a IFN_DEFERRED_INIT call. */
987 if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
988 return false;
989 tree lhs = gimple_assign_lhs (assign_stmt);
990 if (TREE_CODE (lhs) != VAR_DECL)
991 return false;
992 tree rhs = gimple_assign_rhs1 (assign_stmt);
993 if (TREE_CODE (rhs) != SSA_NAME)
994 return false;
995 const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
996 const gcall *call = dyn_cast <const gcall *> (def_stmt);
997 if (!call)
998 return false;
999 if (gimple_call_internal_p (call)
1000 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1001 return true;
1002 return false;
1003}
1004
33255ad3
DM
1005/* Check for SVAL being poisoned, adding a warning to CTXT.
1006 Return SVAL, or, if a warning is added, another value, to avoid
1007 repeatedly complaining about the same poisoned value in followup code. */
1008
const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				region_model_context *ctxt) const
{
  /* With no context there is nowhere to report; pass the value through.  */
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
1067
00e7d024
DM
1068/* Attempt to get a region for describing EXPR, the source of region of
1069 a poisoned_svalue for use in a poisoned_value_diagnostic.
1070 Return NULL if there is no good region to use. */
1071
1072const region *
1073region_model::get_region_for_poisoned_expr (tree expr) const
1074{
1075 if (TREE_CODE (expr) == SSA_NAME)
1076 {
1077 tree decl = SSA_NAME_VAR (expr);
1078 if (decl && DECL_P (decl))
1079 expr = decl;
1080 else
1081 return NULL;
1082 }
1083 return get_lvalue (expr, NULL);
1084}
1085
808f4dfe
DM
1086/* Update this model for the ASSIGN stmt, using CTXT to report any
1087 diagnostics. */
1088
void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      /* Complain (via CTXT) if SVAL is poisoned before storing it.  */
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  /* Fallback for rhs codes that get_gassign_result doesn't handle.  */
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Flip to 1 when debugging to find unhandled tree codes.  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		/* Elements without an explicit index use their position.  */
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
1168
33255ad3 1169/* Handle the pre-sm-state part of STMT, modifying this object in-place.
33255ad3
DM
1170 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1171 side effects. */
1172
void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  /* Dispatch on the kind of gimple stmt; only calls can set
     *OUT_UNKNOWN_SIDE_EFFECTS.  */
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized by
	   anything, for which we don't have a function body, or for which we
	   don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);
	*out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}
1216
9ff3e236
DM
1217/* Ensure that all arguments at the call described by CD are checked
1218 for poisoned values, by calling get_rvalue on each argument. */
1219
1220void
1221region_model::check_call_args (const call_details &cd) const
1222{
1223 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1224 cd.get_arg_svalue (arg_idx);
1225}
1226
aee1adf2
DM
1227/* Return true if CD is known to be a call to a function with
1228 __attribute__((const)). */
1229
1230static bool
1231const_fn_p (const call_details &cd)
1232{
1233 tree fndecl = cd.get_fndecl_for_call ();
1234 if (!fndecl)
1235 return false;
1236 gcc_assert (DECL_P (fndecl));
1237 return TREE_READONLY (fndecl);
1238}
1239
1240/* If this CD is known to be a call to a function with
1241 __attribute__((const)), attempt to get a const_fn_result_svalue
1242 based on the arguments, or return NULL otherwise. */
1243
1244static const svalue *
1245maybe_get_const_fn_result (const call_details &cd)
1246{
1247 if (!const_fn_p (cd))
1248 return NULL;
1249
1250 unsigned num_args = cd.num_args ();
1251 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1252 /* Too many arguments. */
1253 return NULL;
1254
1255 auto_vec<const svalue *> inputs (num_args);
1256 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1257 {
1258 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1259 if (!arg_sval->can_have_associated_state_p ())
1260 return NULL;
1261 inputs.quick_push (arg_sval);
1262 }
1263
1264 region_model_manager *mgr = cd.get_manager ();
1265 const svalue *sval
1266 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1267 cd.get_fndecl_for_call (),
1268 inputs);
1269 return sval;
1270}
1271
792f039f
DM
1272/* Update this model for an outcome of a call that returns a specific
1273 integer constant.
07e30160
DM
1274 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1275 the state-merger code from merging success and failure outcomes. */
1276
1277void
792f039f
DM
1278region_model::update_for_int_cst_return (const call_details &cd,
1279 int retval,
1280 bool unmergeable)
07e30160
DM
1281{
1282 if (!cd.get_lhs_type ())
1283 return;
4e4e45a4
DM
1284 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1285 return;
07e30160 1286 const svalue *result
792f039f 1287 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
07e30160
DM
1288 if (unmergeable)
1289 result = m_mgr->get_or_create_unmergeable (result);
1290 set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1291}
1292
792f039f
DM
1293/* Update this model for an outcome of a call that returns zero.
1294 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1295 the state-merger code from merging success and failure outcomes. */
1296
void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  /* Delegate to the general integer-constant case with a value of 0.  */
  update_for_int_cst_return (cd, 0, unmergeable);
}
1303
07e30160
DM
1304/* Update this model for an outcome of a call that returns non-zero. */
1305
1306void
1307region_model::update_for_nonzero_return (const call_details &cd)
1308{
1309 if (!cd.get_lhs_type ())
1310 return;
4e4e45a4
DM
1311 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1312 return;
07e30160
DM
1313 const svalue *zero
1314 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1315 const svalue *result
1316 = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1317 add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
1318}
1319
1320/* Subroutine of region_model::maybe_get_copy_bounds.
1321 The Linux kernel commonly uses
1322 min_t([unsigned] long, VAR, sizeof(T));
1323 to set an upper bound on the size of a copy_to_user.
1324 Attempt to simplify such sizes by trying to get the upper bound as a
1325 constant.
1326 Return the simplified svalue if possible, or NULL otherwise. */
1327
1328static const svalue *
1329maybe_simplify_upper_bound (const svalue *num_bytes_sval,
1330 region_model_manager *mgr)
1331{
1332 tree type = num_bytes_sval->get_type ();
1333 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
1334 num_bytes_sval = raw;
1335 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
1336 if (binop_sval->get_op () == MIN_EXPR)
1337 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
1338 {
1339 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
1340 /* TODO: we might want to also capture the constraint
1341 when recording the diagnostic, or note that we're using
1342 the upper bound. */
1343 }
1344 return NULL;
1345}
1346
1347/* Attempt to get an upper bound for the size of a copy when simulating a
1348 copy function.
1349
1350 NUM_BYTES_SVAL is the symbolic value for the size of the copy.
1351 Use it if it's constant, otherwise try to simplify it. Failing
1352 that, use the size of SRC_REG if constant.
1353
1354 Return a symbolic value for an upper limit on the number of bytes
1355 copied, or NULL if no such value could be determined. */
1356
1357const svalue *
1358region_model::maybe_get_copy_bounds (const region *src_reg,
1359 const svalue *num_bytes_sval)
1360{
1361 if (num_bytes_sval->maybe_get_constant ())
1362 return num_bytes_sval;
1363
1364 if (const svalue *simplified
1365 = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
1366 num_bytes_sval = simplified;
1367
1368 if (num_bytes_sval->maybe_get_constant ())
1369 return num_bytes_sval;
1370
1371 /* For now, try just guessing the size as the capacity of the
1372 base region of the src.
1373 This is a hack; we might get too large a value. */
1374 const region *src_base_reg = src_reg->get_base_region ();
1375 num_bytes_sval = get_capacity (src_base_reg);
1376
1377 if (num_bytes_sval->maybe_get_constant ())
1378 return num_bytes_sval;
1379
1380 /* Non-constant: give up. */
1381 return NULL;
1382}
1383
6bd31b33
DM
1384/* Get any known_function for FNDECL for call CD.
1385
1386 The call must match all assumptions made by the known_function (such as
1387 e.g. "argument 1's type must be a pointer type").
1388
1389 Return NULL if no known_function is found, or it does not match the
1390 assumption(s). */
1391
1392const known_function *
1393region_model::get_known_function (tree fndecl, const call_details &cd) const
1394{
1395 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1396 return known_fn_mgr->get_match (fndecl, cd);
1397}
1398
1399/* Get any known_function for IFN, or NULL. */
07e30160
DM
1400
1401const known_function *
6bd31b33 1402region_model::get_known_function (enum internal_fn ifn) const
07e30160
DM
1403{
1404 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
6bd31b33 1405 return known_fn_mgr->get_internal_fn (ifn);
07e30160
DM
1406}
1407
757bf1df
DM
1408/* Update this model for the CALL stmt, using CTXT to report any
1409 diagnostics - the first half.
1410
1411 Updates to the region_model that should be made *before* sm-states
1412 are updated are done here; other updates to the region_model are done
ef7827b0 1413 in region_model::on_call_post.
757bf1df 1414
ef7827b0
DM
1415 Return true if the function call has unknown side effects (it wasn't
1416 recognized and we don't have a body for it, or are unable to tell which
6bd31b33 1417 fndecl it is). */
ef7827b0
DM
1418
bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false;

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
	{
	  /* For the common case of functions without __attribute__((const)),
	     use a conjured value, and purge any prior state involving that
	     value (in case this is in a loop).  */
	  sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
						       lhs_region,
						       conjured_purge (this,
								       ctxt));
	}
      set_value (lhs_region, sval, ctxt);
    }

  /* Internal fns with a known_function are handled here; they are
     assumed to have no unknown side effects.  */
  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false;
      }

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);

      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_pre (cd);
	  return false;
	}
      else if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	       && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	{
	  /* Unhandled built-ins: only const/pure ones are known to be
	     free of unknown side effects.  */
	  if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	    unknown_side_effects = true;
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	       && !fndecl_built_in_p (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    /* No fndecl at all (e.g. call through function pointer).  */
    unknown_side_effects = true;

  return unknown_side_effects;
}
1496
1497/* Update this model for the CALL stmt, using CTXT to report any
1498 diagnostics - the second half.
1499
1500 Updates to the region_model that should be made *after* sm-states
1501 are updated are done here; other updates to the region_model are done
ef7827b0
DM
1502 in region_model::on_call_pre.
1503
1504 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
1505 to purge state. */
757bf1df
DM
1506
void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  /* Not recognized: conservatively wipe state reachable from the call.  */
  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
1532
33255ad3
DM
1533/* Purge state involving SVAL from this region_model, using CTXT
1534 (if non-NULL) to purge other state in a program_state.
1535
1536 For example, if we're at the def-stmt of an SSA name, then we need to
1537 purge any state for svalues that involve that SSA name. This avoids
1538 false positives in loops, since a symbolic value referring to the
1539 SSA name will be referring to the previous value of that SSA name.
1540
1541 For example, in:
1542 while ((e = hashmap_iter_next(&iter))) {
1543 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
1544 free (e_strbuf->value);
1545 }
1546 at the def-stmt of e_8:
1547 e_8 = hashmap_iter_next (&iter);
1548 we should purge the "freed" state of:
1549 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
1550 which is the "e_strbuf->value" value from the previous iteration,
1551 or we will erroneously report a double-free - the "e_8" within it
1552 refers to the previous value. */
1553
void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  /* Svalues that can't have associated state have nothing to purge.  */
  if (!sval->can_have_associated_state_p ())
    return;
  /* Purge from the store, the constraints, and the dynamic-extents map;
     also purge sm-state from the program_state when CTXT is non-NULL.  */
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}
1566
c65d3c7f
DM
1567/* A pending_note subclass for adding a note about an
1568 __attribute__((access, ...)) to a diagnostic. */
1569
class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  /* Emit the note, pointing at the callee's declaration.  */
  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  /* Deduplication: notes are equal if they describe the same parameter
     of the same fndecl with the same access string.  */
  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;	     /* The function marked with the attribute.  */
  unsigned m_ptr_argno;	     /* 0-based index of the pointer argument.  */
  const char *m_access_str;  /* External string form of the attr_access.  */
};
1601
b6eaf90c
DM
1602/* Check CALL a call to external function CALLEE_FNDECL based on
1603 any __attribute__ ((access, ....) on the latter, complaining to
1604 CTXT about any issues.
1605
1606 Currently we merely call check_region_for_write on any regions
1607 pointed to by arguments marked with a "write_only" or "read_write"
1608 attribute. */
1609
void
region_model::
check_external_function_for_access_attr (const gcall *call,
					 tree callee_fndecl,
					 region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of decorated_region_model_context that
	     adds a note about the attr access to any saved diagnostics.  */
	  class annotating_ctxt : public note_adding_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : note_adding_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    std::unique_ptr<pending_note> make_note () final override
	    {
	      return make_unique<reason_attr_access>
		(m_callee_fndecl, m_access);
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them.  */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  /* Check that the pointed-to region is writable, complaining
	     via MY_CTXT if not.  */
	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, &my_ctxt);
	  /* We don't use the size arg for now.  */
	}
    }
}
1683
ef7827b0
DM
1684/* Handle a call CALL to a function with unknown behavior.
1685
1686 Traverse the regions in this model, determining what regions are
1687 reachable from pointer arguments to CALL and from global variables,
1688 recursively.
1689
1690 Set all reachable regions to new unknown values and purge sm-state
1691 from their values, and from values that point to them. */
1692
void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  /* Check __attribute__((access, ...)) on the callee, if any.  */
  if (fndecl && ctxt)
    check_external_function_for_access_attr (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
ef7827b0 1774
808f4dfe
DM
1775/* Traverse the regions in this model, determining what regions are
1776 reachable from the store and populating *OUT.
ef7827b0 1777
808f4dfe
DM
1778 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1779 for reachability (for handling return values from functions when
1780 analyzing return of the only function on the stack).
1781
3a66c289
DM
1782 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1783 within it as being maybe-bound as additional "roots" for reachability.
1784
808f4dfe
DM
1785 Find svalues that haven't leaked. */
1786
void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  /* Optional extra root (e.g. a function's return value).  */
  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Treat maybe-bound svalues recorded in UNCERTAINTY as roots too.  */
  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
1824
1825/* Update this model for the RETURN_STMT, using CTXT to report any
1826 diagnostics. */
1827
1828void
1829region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1830{
1831 tree callee = get_current_function ()->decl;
1832 tree lhs = DECL_RESULT (callee);
1833 tree rhs = gimple_return_retval (return_stmt);
1834
1835 if (lhs && rhs)
13ad6d9f
DM
1836 {
1837 const svalue *sval = get_rvalue (rhs, ctxt);
1838 const region *ret_reg = get_lvalue (lhs, ctxt);
1839 set_value (ret_reg, sval, ctxt);
1840 }
757bf1df
DM
1841}
1842
342e14ff
DM
1843/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1844 ENODE, using CTXT to report any diagnostics.
757bf1df 1845
342e14ff
DM
1846 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1847 0), as opposed to any second return due to longjmp/sigsetjmp. */
757bf1df
DM
1848
void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  /* Arg 0 is the jmp_buf; dereference it to get the buffer's region.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
1876
1877/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1878 to a "setjmp" at SETJMP_CALL where the final stack depth should be
808f4dfe
DM
1879 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1880 done, and should be done by the caller. */
757bf1df
DM
1881
void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
1929
/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Get state from OLD_STATE so that all of the phi stmts for a basic block
   are effectively handled simultaneously.  */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  const region_model &old_state,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  /* Evaluate both sides against OLD_STATE (rather than *this), so that
     the phis of a basic block don't see each other's updates.  */
  const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
  const region *dst_reg = old_state.get_lvalue (lhs, ctxt);

  set_value (dst_reg, src_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
1956
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unhandled tree code: complain (in checking builds) and fall back
	 to a placeholder region.  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	/* Operands 1 and 2 of a BIT_FIELD_REF are the size and the
	   starting bit position; both are required to be constants.  */
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	/* Locals are looked up within the frame recorded in PV, not
	   necessarily the innermost frame.  */
	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
2057
/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op.
   NULL types and a void DST_TYPE are trivially accepted; the actual
   check is only performed in checking builds (CHECKING_P), where a
   mismatch is an internal compiler error.  */

static void
assert_compat_types (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    {
#if CHECKING_P
      if (!(useless_type_conversion_p (src_type, dst_type)))
	internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
#endif
    }
}
757bf1df 2071
ea4e3218
DM
2072/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2073
e66b9f67 2074bool
ea4e3218
DM
2075compat_types_p (tree src_type, tree dst_type)
2076{
2077 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2078 if (!(useless_type_conversion_p (src_type, dst_type)))
2079 return false;
2080 return true;
2081}
2082
/* Get the region for PV within this region_model,
   emitting any diagnostics to CTXT.
   Returns NULL for a NULL tree; otherwise checks (in checking builds)
   that the region's type matches the tree's type.  */

const region *
region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const region *result_reg = get_lvalue_1 (pv, ctxt);
  assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
  return result_reg;
}
2096
/* Get the region for EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const region *
region_model::get_lvalue (tree expr, region_model_context *ctxt) const
{
  /* Wrap EXPR in a path_var bound to the innermost frame.  */
  return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}
2105
/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      /* Unhandled tree codes conservatively become "unknown".  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR".  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	/* Operands 1 and 2 are the constant size and start bit.  */
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	/* Simple lvalues: look the corresponding region up in the store.  */
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	/* Virtual call target: evaluate the underlying expression.  */
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}
2216
/* Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.
   Returns NULL for a NULL tree.  The result is checked for type
   compatibility and for poisoned (e.g. uninitialized) values.  */

const svalue *
region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const svalue *result_sval = get_rvalue_1 (pv, ctxt);

  assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));

  /* May warn via CTXT and replace the value (e.g. for uninit reads).  */
  result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);

  return result_sval;
}
2234
/* Get the value of EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const svalue *
region_model::get_rvalue (tree expr, region_model_context *ctxt) const
{
  /* Wrap EXPR in a path_var bound to the innermost frame.  */
  return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}
2243
/* Return true if this model is on a path with "main" as the entrypoint
   (as opposed to one in which we're merely analyzing a subset of the
   path through the code).  */

bool
region_model::called_from_main_p () const
{
  if (!m_current_frame)
    return false;
  /* Determine if the oldest stack frame in this model is for "main".  */
  const frame_region *frame0 = get_frame_at_index (0);
  gcc_assert (frame0);
  return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
}
2258
/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    {
      /* Attempt to get the initializer value for base_reg.  */
      if (const svalue *base_reg_init
	    = base_reg->get_svalue_for_initializer (m_mgr))
	{
	  if (reg == base_reg)
	    return base_reg_init;
	  else
	    {
	      /* Get the value for REG within base_reg_init, by binding
		 the initializer into a scratch cluster and extracting
		 the sub-binding for REG.  */
	      binding_cluster c (base_reg);
	      c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
	      const svalue *sval
		= c.get_any_binding (m_mgr->get_store_manager (), reg);
	      if (sval)
		{
		  if (reg->get_type ())
		    sval = m_mgr->get_or_create_cast (reg->get_type (),
						      sval);
		  return sval;
		}
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
2315
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* May emit warnings (e.g. taint/bounds) via CTXT.  */
  check_region_for_read (reg, ctxt);

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg);
}
2383
808f4dfe
DM
2384/* Return false if REG does not exist, true if it may do.
2385 This is for detecting regions within the stack that don't exist anymore
2386 after frames are popped. */
757bf1df 2387
808f4dfe
DM
2388bool
2389region_model::region_exists_p (const region *reg) const
757bf1df 2390{
808f4dfe
DM
2391 /* If within a stack frame, check that the stack frame is live. */
2392 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 2393 {
808f4dfe
DM
2394 /* Check that the current frame is the enclosing frame, or is called
2395 by it. */
2396 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2397 iter_frame = iter_frame->get_calling_frame ())
2398 if (iter_frame == enclosing_frame)
2399 return true;
2400 return false;
757bf1df 2401 }
808f4dfe
DM
2402
2403 return true;
757bf1df
DM
2404}
2405
/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
  const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
  m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	/* Pointer to a known region: dereference directly.  */
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	/* Dereference of an uninitialized/freed pointer: warn, then fall
	   through to a symbolic region below.  */
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (make_unique<poisoned_value_diagnostic>
			      (ptr, pkind, NULL));
	      }
	  }
      }
      break;
    }

  /* Fallback: a symbolic region keyed off the pointer value.  */
  return m_mgr->get_symbolic_region (ptr_sval);
}
2486
/* Attempt to get BITS within any value of REG, as TYPE.
   In particular, extract values from compound_svalues for the case
   where there's a concrete binding at BITS.
   Return an unknown svalue if we can't handle the given case.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_rvalue_for_bits (tree type,
				   const region *reg,
				   const bit_range &bits,
				   region_model_context *ctxt) const
{
  const svalue *sval = get_store_value (reg, ctxt);
  return m_mgr->get_or_create_bits_within (type, bits, sval);
}
2502
/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory (const objects, functions, labels).  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    bool warned;
    /* Tailor the message to the kind of region written to.  */
    switch (m_reg->get_kind ())
      {
      default:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;	/* The region written to.  */
  tree m_decl;		/* The decl for that region.  */
};
2571
/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;	/* The string-literal region written to.  */
};
/* Use CTXT to warn If DEST_REG is a region that shouldn't be written to.  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Classify by the base region of the write destination.  */
  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
2664
/* Get the capacity of REG in bytes.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    /* SSA names have no initializer; use the type's size.  */
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    }

  /* Fall back to any dynamically-recorded extent (e.g. from malloc).  */
  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
2703
0a9c0d4a
TL
2704/* Return the string size, including the 0-terminator, if SVAL is a
2705 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
2706
2707const svalue *
2708region_model::get_string_size (const svalue *sval) const
2709{
2710 tree cst = sval->maybe_get_constant ();
2711 if (!cst || TREE_CODE (cst) != STRING_CST)
2712 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2713
2714 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2715 return m_mgr->get_or_create_constant_svalue (out);
2716}
2717
2718/* Return the string size, including the 0-terminator, if REG is a
2719 string_region. Otherwise, return an unknown_svalue. */
2720
2721const svalue *
2722region_model::get_string_size (const region *reg) const
2723{
2724 const string_region *str_reg = dyn_cast <const string_region *> (reg);
2725 if (!str_reg)
2726 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2727
2728 tree cst = str_reg->get_string_cst ();
2729 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2730 return m_mgr->get_or_create_constant_svalue (out);
2731}
2732
/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.  */

void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  /* Checks common to reads and writes.  */
  check_region_for_taint (reg, dir, ctxt);
  check_region_bounds (reg, dir, ctxt);

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
}
2760
/* If CTXT is non-NULL, use it to warn about any problems writing to REG.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}
2769
/* If CTXT is non-NULL, use it to warn about any problems reading from REG.  */

void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  check_region_access (src_reg, DIR_READ, ctxt);
}
2778
/* Concrete subclass for casts of pointers that lead to trailing bytes.  */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  dubious_allocation_size (const region *lhs, const region *rhs)
  : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
    m_has_allocation_event (false)
  {}

  dubious_allocation_size (const region *lhs, const region *rhs,
			   tree expr)
  : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
    m_has_allocation_event (false)
  {}

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return m_lhs == other.m_lhs && m_rhs == other.m_rhs
      && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (131);	/* CWE-131: Incorrect Calculation of Buffer Size.  */

    return warning_meta (rich_loc, m, get_controlling_option (),
			 "allocated buffer size is not a multiple"
			 " of the pointee's size");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    /* Prefer the short form when an allocation event already described
       the size earlier on the path.  */
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
      }

    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

private:
  const region *m_lhs;	/* The pointer the allocation is assigned to.  */
  const region *m_rhs;	/* The allocated region.  */
  const tree m_expr;	/* The size expression, if known (else NULL_TREE).  */
  bool m_has_allocation_event;	/* Whether an allocation event was emitted.  */
};
2876
2877/* Return true on dubious allocation sizes for constant sizes. */
2878
2879static bool
2880capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2881 bool is_struct)
2882{
2883 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2884 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
2885
2886 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
2887 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
2888
2889 if (is_struct)
b4cc945c 2890 return alloc_size == 0 || alloc_size >= pointee_size;
e6c3bb37
TL
2891 return alloc_size % pointee_size == 0;
2892}
2893
2894static bool
2895capacity_compatible_with_type (tree cst, tree pointee_size_tree)
2896{
2897 return capacity_compatible_with_type (cst, pointee_size_tree, false);
2898}
2899
/* Checks whether SVAL could be a multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that might be a multiple of SIZE_CST to the RESULT_SET.  */

class size_visitor : public visitor
{
public:
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    /* Kick off the traversal; the visit_* callbacks below populate
       result_set bottom-up.  */
    m_root_sval->accept (this);
  }

  /* Return true if the root svalue ended up in the result set,
     i.e. it might be a multiple of m_size_cst.  */
  bool get_result ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  /* Unknown values could be anything, including a valid multiple.  */
  void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* Likewise, be optimistic about poisoned values.  */
  void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* A unary op (e.g. a cast) is compatible iff its operand is.  */
  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    const svalue *arg = sval->get_arg ();
    if (result_set.contains (arg))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    if (sval->get_op () == MULT_EXPR)
      {
	/* A product is a multiple if either factor is.  */
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
      }
    else
      {
	/* For other binops (e.g. additions), require both operands to
	   be possible multiples.  */
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
      }
  }

  /* A repeated value is compatible iff the repeated inner value is.  */
  void visit_repeated_svalue (const repeated_svalue *sval) final override
  {
    sval->get_inner_svalue ()->accept (this);
    if (result_set.contains (sval->get_inner_svalue ()))
      result_set.add (sval);
  }

  /* Unwrap unmergeable wrappers: compatible iff the wrapped value is.  */
  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    sval->get_arg ()->accept (this);
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  /* A widened (iteration-merged) value is compatible only if both the
     base and the iterated value are.  */
  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) && result_set.contains (iter))
      result_set.add (sval);
  }

  /* For conjured values, consult the constraint manager: if the value
     is known to equal a constant, check that constant; otherwise
     optimistically treat it as a possible multiple.  */
  void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    equiv_class_id id (-1);
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
	else
	  result_set.add (sval);
      }
  }

  /* Asm outputs are opaque; assume compatibility.  */
  void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* Results of const functions are opaque; assume compatibility.  */
  void visit_const_fn_result_svalue (const const_fn_result_svalue
				      *sval ATTRIBUTE_UNUSED) final override
  {
    result_set.add (sval);
  }

private:
  /* Add SVAL to the result set if the constant CST is a compatible
     capacity for m_size_cst; non-INTEGER_CST constants are
     optimistically assumed compatible.  */
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	result_set.add (sval);
	break;
      case INTEGER_CST:
	if (capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;		/* Size of the pointee type, in bytes.  */
  const svalue *m_root_sval;	/* The svalue the query is about.  */
  constraint_manager *m_cm;	/* For resolving conjured svalues.  */
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};
3030
3031/* Return true if a struct or union either uses the inheritance pattern,
3032 where the first field is a base struct, or the flexible array member
3033 pattern, where the last field is an array without a specified size. */
3034
3035static bool
3036struct_or_union_with_inheritance_p (tree struc)
3037{
3038 tree iter = TYPE_FIELDS (struc);
3039 if (iter == NULL_TREE)
3040 return false;
3041 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3042 return true;
3043
3044 tree last_field;
3045 while (iter != NULL_TREE)
3046 {
3047 last_field = iter;
3048 iter = DECL_CHAIN (iter);
3049 }
3050
3051 if (last_field != NULL_TREE
3052 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3053 return true;
3054
3055 return false;
3056}
3057
3058/* Return true if the lhs and rhs of an assignment have different types. */
3059
3060static bool
3061is_any_cast_p (const gimple *stmt)
3062{
c83e9731 3063 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
e6c3bb37
TL
3064 return gimple_assign_cast_p (assign)
3065 || !pending_diagnostic::same_tree_p (
3066 TREE_TYPE (gimple_assign_lhs (assign)),
3067 TREE_TYPE (gimple_assign_rhs1 (assign)));
c83e9731 3068 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
e6c3bb37
TL
3069 {
3070 tree lhs = gimple_call_lhs (call);
3071 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3072 TREE_TYPE (gimple_call_lhs (call)),
3073 gimple_call_return_type (call));
3074 }
3075
3076 return false;
3077}
3078
/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  /* Without a context and statement there is nowhere to report.  */
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  /* Only pointers to concrete regions can be checked.  */
  const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
  if (!reg_sval)
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand size actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = reg_sval->get_pointee ();
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	/* Concrete capacity: check it directly against the pointee
	   size and warn on mismatch.  */
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							     cst_cap));
      }
      break;
    default:
      {
	if (!is_struct)
	  {
	    /* Symbolic capacity: ask size_visitor whether the capacity
	       could be a multiple of the pointee size, and warn only
	       if it cannot.  */
	    size_visitor v (pointee_size_tree, capacity, m_constraints);
	    if (!v.get_result ())
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
								   rhs_reg,
								   expr));
	      }
	  }
	break;
      }
    }
}
3154
/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Setting the value of an empty region is a no-op.  */
  if (lhs_reg->empty_p ())
    return;

  /* Warn about dubious allocation sizes on pointer assignments that
     change the pointee type (see check_region_size above).  */
  check_region_size (lhs_reg, rhs_sval, ctxt);

  /* Validate the write itself; check_region_for_write is defined
     elsewhere — presumably it reports invalid-write diagnostics.  */
  check_region_for_write (lhs_reg, ctxt);

  /* Perform the actual binding in the store, propagating any
     uncertainty from the context if one was supplied.  */
  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : NULL);
}
3177
808f4dfe 3178/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
3179
3180void
808f4dfe 3181region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 3182{
808f4dfe
DM
3183 const region *lhs_reg = get_lvalue (lhs, ctxt);
3184 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3185 gcc_assert (lhs_reg);
3186 gcc_assert (rhs_sval);
3187 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
3188}
3189
808f4dfe 3190/* Remove all bindings overlapping REG within the store. */
884d9141
DM
3191
3192void
808f4dfe
DM
3193region_model::clobber_region (const region *reg)
3194{
3195 m_store.clobber_region (m_mgr->get_store_manager(), reg);
3196}
3197
3198/* Remove any bindings for REG within the store. */
3199
3200void
3201region_model::purge_region (const region *reg)
3202{
3203 m_store.purge_region (m_mgr->get_store_manager(), reg);
3204}
3205
e61ffa20
DM
3206/* Fill REG with SVAL. */
3207
3208void
3209region_model::fill_region (const region *reg, const svalue *sval)
3210{
3211 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
3212}
3213
808f4dfe
DM
3214/* Zero-fill REG. */
3215
3216void
3217region_model::zero_fill_region (const region *reg)
3218{
3219 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
3220}
3221
3222/* Mark REG as having unknown content. */
3223
3224void
3a66c289
DM
3225region_model::mark_region_as_unknown (const region *reg,
3226 uncertainty_t *uncertainty)
884d9141 3227{
3a66c289
DM
3228 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
3229 uncertainty);
884d9141
DM
3230}
3231
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* See what we know based on the values.  */

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      /* Reflexive comparisons: x == x, x >= x, x <= x hold;
	 x != x, x > x, x < x do not.  */
      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    {
      /* If we have a pair of constants, compare them.  */
      if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
      else
	{
	  /* When we have one constant, put it on the RHS.  */
	  std::swap (lhs, rhs);
	  op = swap_tree_comparison (op);
	}
    }
  /* From here on, any constant is on the RHS.  */
  gcc_assert (lhs->get_kind () != SK_CONSTANT);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practise and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
	else if (const unaryop_svalue *unaryop
		   = lhs->dyn_cast_unaryop_svalue ())
	  {
	    if (unaryop->get_op () == NEGATE_EXPR)
	      {
		/* e.g. "-X <= 0" is equivalent to X >= 0".  */
		tristate lhs_ts = eval_condition (unaryop->get_arg (),
						  swap_tree_comparison (op),
						  rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* Widening values vs constants can sometimes be decided without
     consulting the constraint manager.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
	{
	default:
	  break;
	case EQ_EXPR:
	  {
	    /* TODO: binops can be equal even if they are not structurally
	       equal in case of commutative operators.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case LE_EXPR:
	  {
	    /* Structural equality implies x <= x.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GE_EXPR:
	  {
	    /* x >= x by equality, or by symbolic dominance.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	    res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GT_EXPR:
	  {
	    tristate res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	}
    }

  /* Otherwise, try constraints.
     Cast to const to ensure we don't change the constraint_manager as we
     do this (e.g. by creating equivalence classes).  */
  const constraint_manager *constraints = m_constraints;
  return constraints->eval_condition (lhs, op, rhs);
}
3438
9bbcee45 3439/* Subroutine of region_model::eval_condition, for rejecting
808f4dfe
DM
3440 equality of INIT_VAL(PARM) with &LOCAL. */
3441
3442tristate
3443region_model::compare_initial_and_pointer (const initial_svalue *init,
3444 const region_svalue *ptr) const
3445{
3446 const region *pointee = ptr->get_pointee ();
3447
3448 /* If we have a pointer to something within a stack frame, it can't be the
3449 initial value of a param. */
3450 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
3451 if (init->initial_value_of_param_p ())
3452 return tristate::TS_FALSE;
757bf1df
DM
3453
3454 return tristate::TS_UNKNOWN;
3455}
3456
/* Return true if SVAL is definitely positive.  */

static bool
is_positive_svalue (const svalue *sval)
{
  /* A non-zero constant whose value range is positive.  */
  if (tree cst = sval->maybe_get_constant ())
    return !zerop (cst) && get_range_pos_neg (cst) == 1;
  tree type = sval->get_type ();
  if (!type)
    return false;
  /* Consider a binary operation size_t + int.  The analyzer wraps the int in
     an unaryop_svalue, converting it to a size_t, but in the dynamic execution
     the result is smaller than the first operand.  Thus, we have to look if
     the argument of the unaryop_svalue is also positive.  */
  if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
    return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
	   && is_positive_svalue (un_op->get_arg ());
  /* Otherwise fall back on the signedness of the type.  */
  return TYPE_UNSIGNED (type);
}
3476
/* Return true if A is definitely larger than B.

   Limitation: does not account for integer overflows and does not try to
   return false, so it can not be used negated.  */

tristate
region_model::symbolic_greater_than (const binop_svalue *bin_a,
				     const svalue *b) const
{
  if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
    {
      /* Eliminate the right-hand side of both svalues.  */
      if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	if (bin_a->get_op () == bin_b->get_op ()
	    && eval_condition (bin_a->get_arg1 (),
			       GT_EXPR,
			       bin_b->get_arg1 ()).is_true ()
	    && eval_condition (bin_a->get_arg0 (),
			       GE_EXPR,
			       bin_b->get_arg0 ()).is_true ())
	  /* Same op, arg1_a > arg1_b and arg0_a >= arg0_b
	     (ignoring overflow) imply A > B.  */
	  return tristate (tristate::TS_TRUE);

      /* Otherwise, try to remove a positive offset or factor from BIN_A.  */
      if (is_positive_svalue (bin_a->get_arg1 ())
	  && eval_condition (bin_a->get_arg0 (),
			     GE_EXPR, b).is_true ())
	return tristate (tristate::TS_TRUE);
    }
  /* Anything else: give up rather than claim FALSE (see limitation
     above).  */
  return tristate::unknown ();
}
3507
/* Return true if A and B are equal structurally.

   Structural equality means that A and B are equal if the svalues A and B have
   the same nodes at the same positions in the tree and the leafs are equal.
   Equality for conjured_svalues and initial_svalues is determined by comparing
   the pointers while constants are compared by value.  That behavior is useful
   to check for binaryop_svlaues that evaluate to the same concrete value but
   might use one operand with a different type but the same constant value.

   For example,
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var')),
		   constant_svalue (‘size_t’, 4))
   and
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var'),
		   constant_svalue (‘sizetype’, 4))
   are structurally equal.  A concrete C code example, where this occurs, can
   be found in test7 of out-of-bounds-5.c.  */

tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	/* Constants compare by value, not by pointer, so that the same
	   number with different-but-compatible types still matches.  */
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	/* Unary ops match if the op, result type, and operand match.  */
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	/* Binops match if the op and both operands match, recursively.  */
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}
3573
/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNEROP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradicts existing constraints.

   Return false for cases that this function doeesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  /* Strip any casts wrapping the binop.  */
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}
3690
757bf1df
DM
3691/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3692 If it is consistent with existing constraints, add it, and return true.
3693 Return false if it contradicts existing constraints.
3694 Use CTXT for reporting any diagnostics associated with the accesses. */
3695
3696bool
3697region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3698 region_model_context *ctxt)
3699{
e978955d
DM
3700 /* For now, make no attempt to capture constraints on floating-point
3701 values. */
3702 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3703 return true;
3704
808f4dfe
DM
3705 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3706 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 3707
48e8a7a6
DM
3708 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3709}
3710
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  /* Try to decompose (in)equality-with-zero of a binop into
     constraints on the binop's operands (see
     add_constraints_from_binop above).  */
  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
3757
84fb3546
DM
3758/* As above, but when returning false, if OUT is non-NULL, write a
3759 new rejected_constraint to *OUT. */
3760
3761bool
3762region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3763 region_model_context *ctxt,
3764 rejected_constraint **out)
3765{
3766 bool sat = add_constraint (lhs, op, rhs, ctxt);
3767 if (!sat && out)
8ca7fa84 3768 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
3769 return sat;
3770}
3771
757bf1df
DM
3772/* Determine what is known about the condition "LHS OP RHS" within
3773 this model.
3774 Use CTXT for reporting any diagnostics associated with the accesses. */
3775
3776tristate
3777region_model::eval_condition (tree lhs,
3778 enum tree_code op,
3779 tree rhs,
5c6546ca 3780 region_model_context *ctxt) const
757bf1df 3781{
e978955d
DM
3782 /* For now, make no attempt to model constraints on floating-point
3783 values. */
3784 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3785 return tristate::unknown ();
3786
757bf1df
DM
3787 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3788}
3789
/* Implementation of region_model::get_representative_path_var.
   Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */

path_var
region_model::get_representative_path_var_1 (const svalue *sval,
					     svalue_set *visited) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    return path_var (NULL_TREE, 0);
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  /* Collect candidate path_vars from the store's bindings.  */
  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, &pvs);

  /* A constant can always represent itself.  */
  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      /* Only field accesses get a COMPONENT_REF representation here.  */
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  /* Handle binops.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
	  = get_representative_path_var (binop_sval->get_arg0 (), visited))
      if (path_var rhs_pv
	    = get_representative_path_var (binop_sval->get_arg1 (), visited))
	return path_var (build2 (binop_sval->get_op (),
				 sval->get_type (),
				 lhs_pv.m_tree, rhs_pv.m_tree),
			 lhs_pv.m_stack_depth);

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Prefer the most readable candidate.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
3867
467a4820
DM
3868/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3869 Use VISITED to prevent infinite mutual recursion with the overload for
3870 regions
3871
3872 This function defers to get_representative_path_var_1 to do the work;
3873 it adds verification that get_representative_path_var_1 returned a tree
3874 of the correct type. */
3875
3876path_var
3877region_model::get_representative_path_var (const svalue *sval,
3878 svalue_set *visited) const
3879{
3880 if (sval == NULL)
3881 return path_var (NULL_TREE, 0);
3882
3883 tree orig_type = sval->get_type ();
3884
3885 path_var result = get_representative_path_var_1 (sval, visited);
3886
3887 /* Verify that the result has the same type as SVAL, if any. */
3888 if (result.m_tree && orig_type)
3889 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3890
3891 return result;
3892}
3893
3894/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3895
3896 Strip off any top-level cast, to avoid messages like
3897 double-free of '(void *)ptr'
3898 from analyzer diagnostics. */
757bf1df 3899
808f4dfe
DM
3900tree
3901region_model::get_representative_tree (const svalue *sval) const
757bf1df 3902{
808f4dfe 3903 svalue_set visited;
467a4820
DM
3904 tree expr = get_representative_path_var (sval, &visited).m_tree;
3905
3906 /* Strip off any top-level cast. */
7e3b45be
TL
3907 if (expr && TREE_CODE (expr) == NOP_EXPR)
3908 expr = TREE_OPERAND (expr, 0);
3909
3910 return fixup_tree_for_diagnostic (expr);
3911}
3912
3913tree
3914region_model::get_representative_tree (const region *reg) const
3915{
3916 svalue_set visited;
3917 tree expr = get_representative_path_var (reg, &visited).m_tree;
3918
3919 /* Strip off any top-level cast. */
467a4820 3920 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 3921 expr = TREE_OPERAND (expr, 0);
467a4820 3922
e4bb1bd6 3923 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
3924}
3925
/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var_1 (const region *reg,
					     svalue_set *visited) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_THREAD_LOCAL:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (reg);
	return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
	/* Rebuild "*PTR" as a MEM_REF with zero offset on a
	   representative of the pointer.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* "PARENT.FIELD", via a representative of the parent region.  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* "PARENT[INDEX]"; needs representatives for both the parent
	   and the index.  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	/* "*(&PARENT + OFFSET)" as a MEM_REF; only constant offsets
	   are expressible (MEM_REF requires an INTEGER_CST offset).  */
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited);
	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
	  return path_var (NULL_TREE, 0);
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 addr_parent, offset_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      /* No tree representation for sized subregions.  */
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_VAR_ARG:
    case RK_ERRNO:
    case RK_UNKNOWN:
      /* Likewise, no tree representation for these region kinds.  */
      return path_var (NULL_TREE, 0);
    }
}
4074
467a4820
DM
4075/* Attempt to return a path_var that represents REG, or return
4076 the NULL path_var.
4077 For example, a region for a field of a local would be a path_var
4078 wrapping a COMPONENT_REF.
4079 Use VISITED to prevent infinite mutual recursion with the overload for
4080 svalues.
4081
4082 This function defers to get_representative_path_var_1 to do the work;
4083 it adds verification that get_representative_path_var_1 returned a tree
4084 of the correct type. */
4085
4086path_var
4087region_model::get_representative_path_var (const region *reg,
4088 svalue_set *visited) const
4089{
4090 path_var result = get_representative_path_var_1 (reg, visited);
4091
4092 /* Verify that the result has the same type as REG, if any. */
4093 if (result.m_tree && reg->get_type ())
4094 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4095
4096 return result;
4097}
4098
/* Update this model for any phis in SNODE, assuming we came from
   LAST_CFG_SUPEREDGE.  */

void
region_model::update_for_phis (const supernode *snode,
			       const cfg_superedge *last_cfg_superedge,
			       region_model_context *ctxt)
{
  gcc_assert (last_cfg_superedge);

  /* Copy this state and pass it to handle_phi so that all of the phi stmts
     are effectively handled simultaneously: each phi reads from OLD_STATE,
     so earlier phis' writes to *this can't affect later phis' sources.  */
  const region_model old_state (*this);

  for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();

      /* SRC is the phi argument for the CFG edge we arrived on.  */
      tree src = last_cfg_superedge->get_phi_arg (phi);
      tree lhs = gimple_phi_result (phi);

      /* Update next_state based on phi and old_state.  */
      handle_phi (phi, lhs, src, old_state, ctxt);
    }
}
4125
/* Attempt to update this model for taking EDGE (where the last statement
   was LAST_STMT), returning true if the edge can be taken, false
   otherwise.
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.

   For CFG superedges where LAST_STMT is a conditional or a switch
   statement, attempt to add the relevant conditions for EDGE to this
   model, returning true if they are feasible, or false if they are
   impossible.

   For call superedges, push frame information and store arguments
   into parameters.

   For return superedges, pop frame information and store return
   values into any lhs.

   Rejection of call/return superedges happens elsewhere, in
   program_point::on_edge (i.e. based on program point, rather
   than program state).  */

bool
region_model::maybe_update_for_edge (const superedge &edge,
				     const gimple *last_stmt,
				     region_model_context *ctxt,
				     rejected_constraint **out)
{
  /* Handle frame updates for interprocedural edges.  */
  switch (edge.m_kind)
    {
    default:
      break;

    case SUPEREDGE_CALL:
      {
	const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
	update_for_call_superedge (*call_edge, ctxt);
      }
      break;

    case SUPEREDGE_RETURN:
      {
	const return_superedge *return_edge
	  = as_a <const return_superedge *> (&edge);
	update_for_return_superedge (*return_edge, ctxt);
      }
      break;

    case SUPEREDGE_INTRAPROCEDURAL_CALL:
      /* This is a no-op for call summaries; we should already
	 have handled the effect of the call summary at the call stmt.  */
      break;
    }

  if (last_stmt == NULL)
    return true;

  /* Apply any constraints for conditionals/switch statements.  */

  if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
    {
      const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
      return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
    }

  if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
    {
      const switch_cfg_superedge *switch_sedge
	= as_a <const switch_cfg_superedge *> (&edge);
      return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
					    ctxt, out);
    }

  /* Apply any constraints due to an exception being thrown.  */
  if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
    if (cfg_sedge->get_flags () & EDGE_EH)
      return apply_constraints_for_exception (last_stmt, ctxt, out);

  return true;
}
4206
4207/* Push a new frame_region on to the stack region.
4208 Populate the frame_region with child regions for the function call's
4209 parameters, using values from the arguments at the callsite in the
4210 caller's frame. */
4211
4212void
aef703cf 4213region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
4214 region_model_context *ctxt,
4215 function *callee)
757bf1df 4216{
808f4dfe 4217 /* Build a vec of argument svalues, using the current top
757bf1df 4218 frame for resolving tree expressions. */
808f4dfe 4219 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
4220
4221 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4222 {
4223 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 4224 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
4225 }
4226
e92d0ff6
AS
4227 if(!callee)
4228 {
4229 /* Get the function * from the gcall. */
4230 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
4231 callee = DECL_STRUCT_FUNCTION (fn_decl);
4232 }
4233
4234 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
4235}
4236
a96f1c38
DM
4237/* Pop the top-most frame_region from the stack, and copy the return
4238 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 4239 the call (if any). */
aef703cf 4240
757bf1df 4241void
aef703cf
AS
4242region_model::update_for_return_gcall (const gcall *call_stmt,
4243 region_model_context *ctxt)
757bf1df 4244{
4cebae09
DM
4245 /* Get the lvalue for the result of the call, passing it to pop_frame,
4246 so that pop_frame can determine the region with respect to the
4247 *caller* frame. */
757bf1df 4248 tree lhs = gimple_call_lhs (call_stmt);
4cebae09 4249 pop_frame (lhs, NULL, ctxt);
757bf1df
DM
4250}
4251
aef703cf
AS
4252/* Extract calling information from the superedge and update the model for the
4253 call */
4254
4255void
4256region_model::update_for_call_superedge (const call_superedge &call_edge,
4257 region_model_context *ctxt)
4258{
4259 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 4260 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
4261}
4262
4263/* Extract calling information from the return superedge and update the model
4264 for the returning call */
4265
4266void
4267region_model::update_for_return_superedge (const return_superedge &return_edge,
4268 region_model_context *ctxt)
4269{
4270 const gcall *call_stmt = return_edge.get_call_stmt ();
4271 update_for_return_gcall (call_stmt, ctxt);
4272}
4273
/* Attempt to use R to replay SUMMARY into this object.
   Return true if it is possible.  */

bool
region_model::replay_call_summary (call_summary_replay &r,
				   const region_model &summary)
{
  /* A summary is always generated with exactly one frame on its stack.  */
  gcc_assert (summary.get_stack_depth () == 1);

  m_store.replay_call_summary (r, summary.m_store);

  /* Replaying the constraints can fail (e.g. contradictory state);
     in that case the summary is not applicable here.  */
  if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
    return false;

  /* Copy over any dynamic extents that can be mapped from the summary
     into the caller's context; silently skip any region/svalue that
     has no caller-side equivalent.  */
  for (auto kv : summary.m_dynamic_extents)
    {
      const region *summary_reg = kv.first;
      const region *caller_reg = r.convert_region_from_summary (summary_reg);
      if (!caller_reg)
	continue;
      const svalue *summary_sval = kv.second;
      const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
      if (!caller_sval)
	continue;
      m_dynamic_extents.put (caller_reg, caller_sval);
    }

  return true;
}
4303
/* Given a true or false edge guarded by conditional statement COND_STMT,
   determine appropriate constraints for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
					   const gcond *cond_stmt,
					   region_model_context *ctxt,
					   rejected_constraint **out)
{
  ::edge cfg_edge = sedge.get_cfg_edge ();
  gcc_assert (cfg_edge != NULL);
  gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));

  enum tree_code op = gimple_cond_code (cond_stmt);
  tree lhs = gimple_cond_lhs (cond_stmt);
  tree rhs = gimple_cond_rhs (cond_stmt);
  /* For the false edge, constrain on the inverse of the condition.  */
  if (cfg_edge->flags & EDGE_FALSE_VALUE)
    op = invert_tree_comparison (op, false /* honor_nans */);
  return add_constraint (lhs, op, rhs, ctxt, out);
}
4331
/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
					     const gswitch *switch_stmt,
					     region_model_context *ctxt,
					     rejected_constraint **out)
{
  /* Get the ranges of values of the index that lead to this edge
     (covering all case labels that share the edge).  */
  bounded_ranges_manager *ranges_mgr = get_range_manager ();
  const bounded_ranges *all_cases_ranges
    = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
  tree index = gimple_switch_index (switch_stmt);
  const svalue *index_sval = get_rvalue (index, ctxt);
  bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
  if (!sat && out)
    *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
  /* Notify the context (e.g. for state-machine updates), but only for
     a feasible, non-empty set of ranges.  */
  if (sat && ctxt && !all_cases_ranges->empty_p ())
    ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
  return sat;
}
4360
/* Apply any constraints due to an exception being thrown at LAST_STMT.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_exception (const gimple *last_stmt,
					       region_model_context *ctxt,
					       rejected_constraint **out)
{
  gcc_assert (last_stmt);
  if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
    if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
      if (is_named_call_p (callee_fndecl, "operator new", call, 1)
	  || is_named_call_p (callee_fndecl, "operator new []", call, 1))
	{
	  /* We have an exception thrown from operator new.
	     Add a constraint that the result was NULL, to avoid a false
	     leak report due to the result being lost when following
	     the EH edge.  */
	  if (tree lhs = gimple_call_lhs (call))
	    return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
	  return true;
	}
  /* Other kinds of throwing stmt: no extra constraints.  */
  return true;
}
4391
/* For use with push_frame when handling a top-level call within the analysis.
   PARAM has a defined but unknown initial value.
   Anything it points to has escaped, since the calling context "knows"
   the pointer, and thus calls to unknown functions could read/write into
   the region.
   If NONNULL is true, then assume that PARAM must be non-NULL.  */

void
region_model::on_top_level_param (tree param,
				  bool nonnull,
				  region_model_context *ctxt)
{
  /* Only pointer-typed params need special handling; scalars simply keep
     their initial_svalue.  */
  if (POINTER_TYPE_P (TREE_TYPE (param)))
    {
      const region *param_reg = get_lvalue (param, ctxt);
      const svalue *init_ptr_sval
	= m_mgr->get_or_create_initial_value (param_reg);
      const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
      /* Mark the pointee as escaped: the outside world may alias it.  */
      m_store.mark_as_escaped (pointee_reg);
      if (nonnull)
	{
	  /* Record "PARAM != NULL" (e.g. from __attribute__((nonnull))).  */
	  const svalue *null_ptr_sval
	    = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
	  add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
	}
    }
}
4419
/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
			  region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
	{
	  /* If there's a mismatching declaration, the call stmt might
	     not have enough args.  Handle this case by leaving the
	     rest of the params as uninitialized.  */
	  if (idx >= arg_svals->length ())
	    break;
	  /* Bind into the default-def SSA name for the param, if there
	     is one, rather than the PARM_DECL itself.  */
	  tree parm_lval = iter_parm;
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    parm_lval = parm_default_ssa;
	  const region *parm_reg = get_lvalue (parm_lval, ctxt);
	  const svalue *arg_sval = (*arg_svals)[idx];
	  set_value (parm_reg, arg_sval, ctxt);
	}

      /* Handle any variadic args: bind remaining arg svalues to
	 var_arg regions of the new frame.  */
      unsigned va_arg_idx = 0;
      for (; idx < arg_svals->length (); idx++, va_arg_idx++)
	{
	  const svalue *arg_sval = (*arg_svals)[idx];
	  const region *var_arg_reg
	    = m_mgr->get_var_arg_region (m_current_frame,
					 va_arg_idx);
	  set_value (var_arg_reg, arg_sval, ctxt);
	}
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
	 have defined but unknown initial values.
	 Anything they point to has escaped.  */
      tree fndecl = fun->decl;

      /* Handle "__attribute__((nonnull))".  */
      tree fntype = TREE_TYPE (fndecl);
      bitmap nonnull_args = get_nonnull_args (fntype);

      unsigned parm_idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm))
	{
	  /* An empty nonnull_args bitmap means *all* args are nonnull.  */
	  bool non_null = (nonnull_args
			   ? (bitmap_empty_p (nonnull_args)
			      || bitmap_bit_p (nonnull_args, parm_idx))
			   : false);
	  if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
	    on_top_level_param (parm_default_ssa, non_null, ctxt);
	  else
	    on_top_level_param (iter_parm, non_null, ctxt);
	  parm_idx++;
	}

      BITMAP_FREE (nonnull_args);
    }

  return m_current_frame;
}
4497
808f4dfe
DM
4498/* Get the function of the top-most frame in this region_model's stack.
4499 There must be such a frame. */
757bf1df 4500
808f4dfe
DM
4501function *
4502region_model::get_current_function () const
757bf1df 4503{
808f4dfe
DM
4504 const frame_region *frame = get_current_frame ();
4505 gcc_assert (frame);
4506 return frame->get_function ();
757bf1df
DM
4507}
4508
808f4dfe 4509/* Pop the topmost frame_region from this region_model's stack;
757bf1df 4510
4cebae09
DM
4511 If RESULT_LVALUE is non-null, copy any return value from the frame
4512 into the corresponding region (evaluated with respect to the *caller*
4513 frame, rather than the called frame).
808f4dfe
DM
4514 If OUT_RESULT is non-null, copy any return value from the frame
4515 into *OUT_RESULT.
757bf1df 4516
808f4dfe
DM
4517 Purge the frame region and all its descendent regions.
4518 Convert any pointers that point into such regions into
4519 POISON_KIND_POPPED_STACK svalues. */
757bf1df 4520
808f4dfe 4521void
4cebae09 4522region_model::pop_frame (tree result_lvalue,
808f4dfe
DM
4523 const svalue **out_result,
4524 region_model_context *ctxt)
4525{
4526 gcc_assert (m_current_frame);
757bf1df 4527
808f4dfe 4528 const frame_region *frame_reg = m_current_frame;
5c6546ca
DM
4529
4530 /* Notify state machines. */
4531 if (ctxt)
4532 ctxt->on_pop_frame (frame_reg);
4533
4534 /* Evaluate the result, within the callee frame. */
808f4dfe
DM
4535 tree fndecl = m_current_frame->get_function ()->decl;
4536 tree result = DECL_RESULT (fndecl);
4cebae09 4537 const svalue *retval = NULL;
808f4dfe
DM
4538 if (result && TREE_TYPE (result) != void_type_node)
4539 {
4cebae09 4540 retval = get_rvalue (result, ctxt);
808f4dfe 4541 if (out_result)
13ad6d9f 4542 *out_result = retval;
808f4dfe 4543 }
757bf1df 4544
808f4dfe
DM
4545 /* Pop the frame. */
4546 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 4547
4cebae09
DM
4548 if (result_lvalue && retval)
4549 {
4550 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4551 the frame, but before poisoning pointers into the old frame. */
4552 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4553 set_value (result_dst_reg, retval, ctxt);
4554 }
4555
808f4dfe 4556 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
4557}
4558
808f4dfe 4559/* Get the number of frames in this region_model's stack. */
757bf1df 4560
808f4dfe
DM
4561int
4562region_model::get_stack_depth () const
757bf1df 4563{
808f4dfe
DM
4564 const frame_region *frame = get_current_frame ();
4565 if (frame)
4566 return frame->get_stack_depth ();
4567 else
4568 return 0;
757bf1df
DM
4569}
4570
808f4dfe
DM
4571/* Get the frame_region with the given index within the stack.
4572 The frame_region must exist. */
757bf1df 4573
808f4dfe
DM
4574const frame_region *
4575region_model::get_frame_at_index (int index) const
757bf1df 4576{
808f4dfe
DM
4577 const frame_region *frame = get_current_frame ();
4578 gcc_assert (frame);
4579 gcc_assert (index >= 0);
4580 gcc_assert (index <= frame->get_index ());
4581 while (index != frame->get_index ())
4582 {
4583 frame = frame->get_calling_frame ();
4584 gcc_assert (frame);
4585 }
4586 return frame;
757bf1df
DM
4587}
4588
/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
					     enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound.
     (Collected first, then purged, so we don't mutate the store while
     iterating over it.)  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
	base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
	unset_dynamic_extents (iter_reg);
    }
}
4623
/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.  */

struct bad_pointer_finder
{
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
		      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  /* Callback for store::for_each_binding; SVAL is an in/out param,
     allowing the bound value to be replaced.  */
  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
	const region *ptr_dst = ptr_sval->get_pointee ();
	/* Poison ptrs to descendents of REG, but not to REG itself,
	   otherwise double-free detection doesn't work (since sm-state
	   for "free" is stored on the original ptr svalue).  */
	if (ptr_dst->descendent_of_p (m_reg)
	    && ptr_dst != m_reg)
	  {
	    sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
							 sval->get_type ());
	    ++m_count;
	  }
      }
  }

  const region *m_reg;			/* Root of the poisoned subtree.  */
  enum poison_kind m_pkind;		/* Kind of poison to apply.  */
  region_model_manager *const m_mgr;	/* For interning poisoned svalues.  */
  int m_count;				/* Number of pointers poisoned.  */
};
757bf1df 4658
808f4dfe
DM
4659/* Find any pointers to REG or its descendents; convert them to
4660 poisoned values of kind PKIND.
4661 Return the number of pointers that were poisoned. */
757bf1df 4662
808f4dfe
DM
4663int
4664region_model::poison_any_pointers_to_descendents (const region *reg,
4665 enum poison_kind pkind)
4666{
4667 bad_pointer_finder bv (reg, pkind, m_mgr);
4668 m_store.for_each_binding (bv);
4669 return bv.m_count;
757bf1df
DM
4670}
4671
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
				const program_point &point,
				region_model *out_model,
				const extrinsic_state *ext_state,
				const program_state *state_a,
				const program_state *state_b) const
{
  gcc_assert (out_model);
  /* All three models must share the same manager.  */
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models in different frames can never merge.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
		  ext_state, state_a, state_b);

  /* Merge stores; bail out if the stores are incompatible.  */
  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
					   &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			     *other_model.m_constraints,
			     out_model->m_constraints);

  return true;
}
4711
/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
				   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  /* Evaluate the called expression; we can only resolve the callee if
     the function pointer's value is a known pointer to a function
     region.  */
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
	= fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
	{
	  tree fn_decl = fn_reg->get_fndecl ();
	  /* Resolve aliases through the callgraph so callers see the
	     ultimate target rather than an alias decl.  */
	  cgraph_node *node = cgraph_node::get (fn_decl);
	  if (!node)
	    return NULL_TREE;
	  const cgraph_node *ultimate_node = node->ultimate_alias_target ();
	  if (ultimate_node)
	    return ultimate_node->decl;
	}
    }

  return NULL_TREE;
}
4741
/* Would be much simpler to use a lambda here, if it were supported.  */

/* Closure-style data passed to region_model::append_regions_cb:
   the model being walked and the output vector to append to.  */

struct append_regions_cb_data
{
  const region_model *model;
  auto_vec<const decl_region *> *out;
};
757bf1df 4749
faacafd2 4750/* Populate *OUT with all decl_regions in the current
808f4dfe 4751 frame that have clusters within the store. */
757bf1df
DM
4752
4753void
808f4dfe 4754region_model::
faacafd2 4755get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
757bf1df 4756{
faacafd2 4757 append_regions_cb_data data;
808f4dfe
DM
4758 data.model = this;
4759 data.out = out;
faacafd2 4760 m_store.for_each_cluster (append_regions_cb, &data);
757bf1df
DM
4761}
4762
faacafd2 4763/* Implementation detail of get_regions_for_current_frame. */
757bf1df 4764
808f4dfe 4765void
faacafd2
DM
4766region_model::append_regions_cb (const region *base_reg,
4767 append_regions_cb_data *cb_data)
757bf1df 4768{
808f4dfe
DM
4769 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4770 return;
4771 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
faacafd2 4772 cb_data->out->safe_push (decl_reg);
757bf1df
DM
4773}
4774
c83e9731
TL
4775
/* Abstract class for diagnostics related to the use of
   floating-point arithmetic where precision is needed.
   Subclasses share the -Wanalyzer-imprecise-fp-arithmetic option.  */

class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};
4787
/* Concrete diagnostic to complain about uses of floating-point arithmetic
   in the size argument of malloc etc.  */

class float_as_size_arg : public imprecise_floating_point_arithmetic
{
public:
  /* ARG is the representative tree for the offending float operand;
     may be NULL_TREE if no representative could be found.  */
  float_as_size_arg (tree arg) : m_arg (arg)
  {}

  const char *get_kind () const final override
  {
    return "float_as_size_arg_diagnostic";
  }

  /* Deduplicate by the offending operand.  */
  bool subclass_equal_p (const pending_diagnostic &other) const final override
  {
    return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
				"use of floating-point arithmetic here might"
				" yield unexpected results");
    if (warned)
      inform (rich_loc->get_loc (), "only use operands of an integer type"
	      " inside the size argument");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    if (m_arg)
      return ev.formatted_print ("operand %qE is of type %qT",
				 m_arg, TREE_TYPE (m_arg));
    return ev.formatted_print ("at least one operand of the size argument is"
			       " of a floating-point type");
  }

private:
  tree m_arg;
};
4832
/* Visitor to find uses of floating-point variables/constants in an svalue.
   After construction, get_svalue_to_report returns a float-typed svalue
   within ROOT_SVAL (preferring non-constants), or NULL if none.  */

class contains_floating_point_visitor : public visitor
{
public:
  contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
  {
    root_sval->accept (this);
  }

  const svalue *get_svalue_to_report ()
  {
    return m_result;
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    /* At the point the analyzer runs, constant integer operands in a floating
       point expression are already implicitly converted to floating-points.
       Thus, we do prefer to report non-constants such that the diagnostic
       always reports a floating-point operand.  */
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type) && !m_result)
      m_result = sval;
  }

  /* Unlike the constant case, always record (overwriting any earlier
     constant) so that non-constant operands take precedence.  */
  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

private:
  /* Non-null if at least one floating-point operand was found.  */
  const svalue *m_result;
};
4877
4878/* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
4879
4880void
4881region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
4882 region_model_context *ctxt) const
4883{
4884 gcc_assert (ctxt);
4885
4886 contains_floating_point_visitor v (size_in_bytes);
4887 if (const svalue *float_sval = v.get_svalue_to_report ())
4888 {
4889 tree diag_arg = get_representative_tree (float_sval);
6341f14e 4890 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
c83e9731
TL
4891 }
4892}
4893
/* Return a region describing a heap-allocated block of memory.
   Use CTXT to complain about tainted sizes.

   Reuse an existing heap_allocated_region if it's not being referenced by
   this region_model; otherwise create a new one.  */

const region *
region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
						   region_model_context *ctxt)
{
  /* Determine which regions are referenced in this region_model, so that
     we can reuse an existing heap_allocated_region if it's not in use on
     this path.  */
  auto_sbitmap base_regs_in_use (m_mgr->get_num_regions ());
  get_referenced_base_regions (base_regs_in_use);
  const region *reg
    = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
  /* Only record the extent if the size expression has a usable type;
     otherwise leave the region's size unknown.  */
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
4915
/* Populate OUT_IDS with the set of IDs of those base regions which are
   reachable in this region_model.  */

void
region_model::get_referenced_base_regions (auto_sbitmap &out_ids) const
{
  /* Seed reachability from every cluster bound in the store.  */
  reachable_regions reachable_regs (const_cast<region_model *> (this));
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);
  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Convert the reachable set into a bitmap of region IDs.  */
  bitmap_clear (out_ids);
  for (auto iter_reg : reachable_regs)
    bitmap_set_bit (out_ids, iter_reg->get_id ());
}
4939
/* Return a new region describing a block of memory allocated within the
   current frame.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_alloca (const svalue *size_in_bytes,
					region_model_context *ctxt)
{
  const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
  /* Only record the extent if the size expression has a usable type.  */
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
4953
/* Record that the size of REG is SIZE_IN_BYTES.
   Use CTXT to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
				   const svalue *size_in_bytes,
				   region_model_context *ctxt)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  /* With a context we can emit diagnostics about suspicious sizes
     (attacker-controlled or floating-point) before recording.  */
  if (ctxt)
    {
      check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
				    ctxt);
      check_dynamic_size_for_floats (size_in_bytes, ctxt);
    }
  m_dynamic_extents.put (reg, size_in_bytes);
}
4971
4972/* Get the recording of REG in bytes, or NULL if no dynamic size was
4973 recorded. */
4974
4975const svalue *
4976region_model::get_dynamic_extents (const region *reg) const
757bf1df 4977{
9a2c9579
DM
4978 if (const svalue * const *slot = m_dynamic_extents.get (reg))
4979 return *slot;
4980 return NULL;
4981}
4982
/* Unset any recorded dynamic size of REG.
   A no-op if REG has no recorded extent.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  m_dynamic_extents.remove (reg);
}
4990
/* Information of the layout of a RECORD_TYPE, capturing it as a vector
   of items, where each item is either a field or padding.  */

class record_layout
{
public:
  /* An item within a record; either a field, or padding after a field.  */
  struct item
  {
  public:
    item (const bit_range &br,
	  tree field,
	  bool is_padding)
    : m_bit_range (br),
      m_field (field),
      m_is_padding (is_padding)
    {
    }

    bit_offset_t get_start_bit_offset () const
    {
      return m_bit_range.get_start_bit_offset ();
    }
    bit_offset_t get_next_bit_offset () const
    {
      return m_bit_range.get_next_bit_offset ();
    }

    bool contains_p (bit_offset_t offset) const
    {
      return m_bit_range.contains_p (offset);
    }

    void dump_to_pp (pretty_printer *pp) const
    {
      if (m_is_padding)
	pp_printf (pp, "padding after %qD", m_field);
      else
	pp_printf (pp, "%qD", m_field);
      pp_string (pp, ", ");
      m_bit_range.dump_to_pp (pp);
    }

    bit_range m_bit_range;
    /* For a padding item, the field the padding follows.  */
    tree m_field;
    bool m_is_padding;
  };

  /* Build the layout by walking RECORD_TYPE's fields in declaration
     order, inserting padding items wherever there is a gap between
     consecutive fields (and after the last field).  */
  record_layout (tree record_type)
  {
    gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);

    for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
	 iter = DECL_CHAIN (iter))
      {
	if (TREE_CODE (iter) == FIELD_DECL)
	  {
	    int iter_field_offset = int_bit_position (iter);
	    bit_size_t size_in_bits;
	    /* Fall back to zero size if the field's size isn't a
	       known constant.  */
	    if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
	      size_in_bits = 0;

	    maybe_pad_to (iter_field_offset);

	    /* Add field.  */
	    m_items.safe_push (item (bit_range (iter_field_offset,
						size_in_bits),
				     iter, false));
	  }
      }

    /* Add any trailing padding.  */
    bit_size_t size_in_bits;
    if (int_size_in_bits (record_type, &size_in_bits))
      maybe_pad_to (size_in_bits);
  }

  void dump_to_pp (pretty_printer *pp) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      {
	it->dump_to_pp (pp);
	pp_newline (pp);
      }
  }

  DEBUG_FUNCTION void dump () const
  {
    pretty_printer pp;
    pp_format_decoder (&pp) = default_tree_printer;
    pp.buffer->stream = stderr;
    dump_to_pp (&pp);
    pp_flush (&pp);
  }

  /* Return the item (field or padding) containing bit OFFSET,
     or NULL if none does.  Linear scan over the items.  */
  const record_layout::item *get_item_at (bit_offset_t offset) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      if (it->contains_p (offset))
	return it;
    return NULL;
  }

private:
  /* Subroutine of ctor.  Add padding item to NEXT_OFFSET if necessary.  */

  void maybe_pad_to (bit_offset_t next_offset)
  {
    if (m_items.length () > 0)
      {
	const item &last_item = m_items[m_items.length () - 1];
	bit_offset_t offset_after_last_item
	  = last_item.get_next_bit_offset ();
	if (next_offset > offset_after_last_item)
	  {
	    bit_size_t padding_size
	      = next_offset - offset_after_last_item;
	    m_items.safe_push (item (bit_range (offset_after_last_item,
						padding_size),
				     last_item.m_field, true));
	  }
      }
  }

  /* Items in increasing bit-offset order.  */
  auto_vec<item> m_items;
};
5121
/* A subclass of pending_diagnostic for complaining about uninitialized data
   being copied across a trust boundary to an untrusted output
   (e.g. copy_to_user infoleaks in the Linux kernel).  */

class exposure_through_uninit_copy
  : public pending_diagnostic_subclass<exposure_through_uninit_copy>
{
public:
  /* SRC_REGION may be NULL (used only as a hint); COPIED_SVAL must be
     either wholly poisoned or a compound value containing poison.  */
  exposure_through_uninit_copy (const region *src_region,
				const region *dest_region,
				const svalue *copied_sval)
  : m_src_region (src_region),
    m_dest_region (dest_region),
    m_copied_sval (copied_sval)
  {
    gcc_assert (m_copied_sval->get_kind () == SK_POISONED
		|| m_copied_sval->get_kind () == SK_COMPOUND);
  }

  const char *get_kind () const final override
  {
    return "exposure_through_uninit_copy";
  }

  /* Deduplication: same source, destination, and copied value.  */
  bool operator== (const exposure_through_uninit_copy &other) const
  {
    return (m_src_region == other.m_src_region
	    && m_dest_region == other.m_dest_region
	    && m_copied_sval == other.m_copied_sval);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_exposure_through_uninit_copy;
  }

  /* Emit the warning, tailoring the wording to the source memory space,
     followed by notes detailing which bits/fields are uninitialized.  */
  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor.  */
    m.add_cwe (200);
    enum memory_space mem_space = get_src_memory_space ();
    bool warned;
    switch (mem_space)
      {
      default:
	warned = warning_meta
	  (rich_loc, m, get_controlling_option (),
	   "potential exposure of sensitive information"
	   " by copying uninitialized data across trust boundary");
	break;
      case MEMSPACE_STACK:
	warned = warning_meta
	  (rich_loc, m, get_controlling_option (),
	   "potential exposure of sensitive information"
	   " by copying uninitialized data from stack across trust boundary");
	break;
      case MEMSPACE_HEAP:
	warned = warning_meta
	  (rich_loc, m, get_controlling_option (),
	   "potential exposure of sensitive information"
	   " by copying uninitialized data from heap across trust boundary");
	break;
      }
    if (warned)
      {
	location_t loc = rich_loc->get_loc ();
	inform_number_of_uninit_bits (loc);
	complain_about_uninit_ranges (loc);

	/* A zero-initializer fixit only makes sense for stack decls.  */
	if (mem_space == MEMSPACE_STACK)
	  maybe_emit_fixit_hint ();
      }
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &) final override
  {
    enum memory_space mem_space = get_src_memory_space ();
    switch (mem_space)
      {
      default:
	return label_text::borrow ("uninitialized data copied here");

      case MEMSPACE_STACK:
	return label_text::borrow ("uninitialized data copied from stack here");

      case MEMSPACE_HEAP:
	return label_text::borrow ("uninitialized data copied from heap here");
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  enum memory_space get_src_memory_space () const
  {
    return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
  }

  /* Count how many bits of the copied value are uninitialized.
     Returns 0 when the size cannot be determined.  */
  bit_size_t calc_num_uninit_bits () const
  {
    switch (m_copied_sval->get_kind ())
      {
      default:
	gcc_unreachable ();
	break;
      case SK_POISONED:
	{
	  /* Entire value is uninitialized: its size is the type's size.  */
	  const poisoned_svalue *poisoned_sval
	    = as_a <const poisoned_svalue *> (m_copied_sval);
	  gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);

	  /* Give up if don't have type information.  */
	  if (m_copied_sval->get_type () == NULL_TREE)
	    return 0;

	  bit_size_t size_in_bits;
	  if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
	    return size_in_bits;

	  /* Give up if we can't get the size of the type.  */
	  return 0;
	}
	break;
      case SK_COMPOUND:
	{
	  /* Sum the sizes of the concretely-bound uninit sub-values.  */
	  const compound_svalue *compound_sval
	    = as_a <const compound_svalue *> (m_copied_sval);
	  bit_size_t result = 0;
	  /* Find keys for uninit svals.  */
	  for (auto iter : *compound_sval)
	    {
	      const svalue *sval = iter.second;
	      if (const poisoned_svalue *psval
		  = sval->dyn_cast_poisoned_svalue ())
		if (psval->get_poison_kind () == POISON_KIND_UNINIT)
		  {
		    const binding_key *key = iter.first;
		    const concrete_binding *ckey
		      = key->dyn_cast_concrete_binding ();
		    gcc_assert (ckey);
		    result += ckey->get_size_in_bits ();
		  }
	    }
	  return result;
	}
      }
  }

  /* Note how much data is uninitialized, in bytes where possible,
     otherwise in bits.  */
  void inform_number_of_uninit_bits (location_t loc) const
  {
    bit_size_t num_uninit_bits = calc_num_uninit_bits ();
    if (num_uninit_bits <= 0)
      return;
    if (num_uninit_bits % BITS_PER_UNIT == 0)
      {
	/* Express in bytes.  */
	byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
	if (num_uninit_bytes == 1)
	  inform (loc, "1 byte is uninitialized");
	else
	  inform (loc,
		  "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
      }
    else
      {
	/* Express in bits.  */
	if (num_uninit_bits == 1)
	  inform (loc, "1 bit is uninitialized");
	else
	  inform (loc,
		  "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
      }
  }

  /* For compound values, describe each uninitialized range, mapping the
     ranges back to fields/padding when a record layout is available.  */
  void complain_about_uninit_ranges (location_t loc) const
  {
    if (const compound_svalue *compound_sval
	= m_copied_sval->dyn_cast_compound_svalue ())
      {
	/* Find keys for uninit svals.  */
	auto_vec<const concrete_binding *> uninit_keys;
	for (auto iter : *compound_sval)
	  {
	    const svalue *sval = iter.second;
	    if (const poisoned_svalue *psval
		= sval->dyn_cast_poisoned_svalue ())
	      if (psval->get_poison_kind () == POISON_KIND_UNINIT)
		{
		  const binding_key *key = iter.first;
		  const concrete_binding *ckey
		    = key->dyn_cast_concrete_binding ();
		  gcc_assert (ckey);
		  uninit_keys.safe_push (ckey);
		}
	  }
	/* Complain about them in sorted order.  */
	uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);

	std::unique_ptr<record_layout> layout;

	tree type = m_copied_sval->get_type ();
	if (type && TREE_CODE (type) == RECORD_TYPE)
	  {
	    // (std::make_unique is C++14)
	    layout = std::unique_ptr<record_layout> (new record_layout (type));

	    if (0)
	      layout->dump ();
	  }

	unsigned i;
	const concrete_binding *ckey;
	FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
	  {
	    bit_offset_t start_bit = ckey->get_start_bit_offset ();
	    bit_offset_t next_bit = ckey->get_next_bit_offset ();
	    complain_about_uninit_range (loc, start_bit, next_bit,
					 layout.get ());
	  }
      }
  }

  /* Describe the half-open bit range [START_BIT, NEXT_BIT).
     If LAYOUT is non-NULL, walk the covered fields/padding items and
     describe each; any remainder (or the whole range, without a layout)
     is reported as raw byte/bit offsets.  */
  void complain_about_uninit_range (location_t loc,
				    bit_offset_t start_bit,
				    bit_offset_t next_bit,
				    const record_layout *layout) const
  {
    if (layout)
      {
	while (start_bit < next_bit)
	  {
	    if (const record_layout::item *item
		= layout->get_item_at (start_bit))
	      {
		gcc_assert (start_bit >= item->get_start_bit_offset ());
		gcc_assert (start_bit < item->get_next_bit_offset ());
		if (item->get_start_bit_offset () == start_bit
		    && item->get_next_bit_offset () <= next_bit)
		  complain_about_fully_uninit_item (*item);
		else
		  complain_about_partially_uninit_item (*item);
		start_bit = item->get_next_bit_offset ();
		continue;
	      }
	    else
	      break;
	  }
      }

    if (start_bit >= next_bit)
      return;

    if (start_bit % 8 == 0 && next_bit % 8 == 0)
      {
	/* Express in bytes.  */
	byte_offset_t start_byte = start_bit / 8;
	byte_offset_t last_byte = (next_bit / 8) - 1;
	if (last_byte == start_byte)
	  inform (loc,
		  "byte %wu is uninitialized",
		  start_byte.to_uhwi ());
	else
	  inform (loc,
		  "bytes %wu - %wu are uninitialized",
		  start_byte.to_uhwi (),
		  last_byte.to_uhwi ());
      }
    else
      {
	/* Express in bits.  */
	bit_offset_t last_bit = next_bit - 1;
	if (last_bit == start_bit)
	  inform (loc,
		  "bit %wu is uninitialized",
		  start_bit.to_uhwi ());
	else
	  inform (loc,
		  "bits %wu - %wu are uninitialized",
		  start_bit.to_uhwi (),
		  last_bit.to_uhwi ());
      }
  }

  /* Note at ITEM's field decl that the whole field (or the padding
     after it) is uninitialized, giving the size.  */
  static void
  complain_about_fully_uninit_item (const record_layout::item &item)
  {
    tree field = item.m_field;
    bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
    if (item.m_is_padding)
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 byte)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (1 bit)",
		      field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "padding after field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
    else
      {
	if (num_bits % 8 == 0)
	  {
	    /* Express in bytes.  */
	    byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
	    if (num_bytes == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 byte)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bytes)",
		      field, num_bytes.to_uhwi ());
	  }
	else
	  {
	    /* Express in bits.  */
	    if (num_bits == 1)
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (1 bit)", field);
	    else
	      inform (DECL_SOURCE_LOCATION (field),
		      "field %qD is uninitialized (%wu bits)",
		      field, num_bits.to_uhwi ());
	  }
      }
  }

  /* As above, but for an item that is only partly uninitialized.  */
  static void
  complain_about_partially_uninit_item (const record_layout::item &item)
  {
    tree field = item.m_field;
    if (item.m_is_padding)
      inform (DECL_SOURCE_LOCATION (field),
	      "padding after field %qD is partially uninitialized",
	      field);
    else
      inform (DECL_SOURCE_LOCATION (field),
	      "field %qD is partially uninitialized",
	      field);
    /* TODO: ideally we'd describe what parts are uninitialized.  */
  }

  /* Suggest zero-initializing the source decl, when it has one.  */
  void maybe_emit_fixit_hint () const
  {
    if (tree decl = m_src_region->maybe_get_decl ())
      {
	gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
	hint_richloc.add_fixit_insert_after (" = {0}");
	inform (&hint_richloc,
		"suggest forcing zero-initialization by"
		" providing a %<{0}%> initializer");
      }
  }

private:
  const region *m_src_region;
  const region *m_dest_region;
  const svalue *m_copied_sval;
};
5504
5505/* Return true if any part of SVAL is uninitialized. */
5506
5507static bool
5508contains_uninit_p (const svalue *sval)
5509{
5510 struct uninit_finder : public visitor
5511 {
5512 public:
5513 uninit_finder () : m_found_uninit (false) {}
5514 void visit_poisoned_svalue (const poisoned_svalue *sval)
5515 {
5516 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5517 m_found_uninit = true;
5518 }
5519 bool m_found_uninit;
5520 };
5521
5522 uninit_finder v;
5523 sval->accept (&v);
5524
5525 return v.m_found_uninit;
5526}
5527
/* Function for use by plugins when simulating writing data through a
   pointer to an "untrusted" region DST_REG (and thus crossing a security
   boundary), such as copying data to user space in an OS kernel.

   Check that COPIED_SVAL is fully initialized.  If not, complain about
   an infoleak to CTXT.

   SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
   as to where COPIED_SVAL came from.  */

void
region_model::maybe_complain_about_infoleak (const region *dst_reg,
					     const svalue *copied_sval,
					     const region *src_reg,
					     region_model_context *ctxt)
{
  /* Check for exposure.  */
  if (contains_uninit_p (copied_sval))
    ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
							   dst_reg,
							   copied_sval));
}
5550
/* Set errno to a positive symbolic int, as if some error has occurred.  */

void
region_model::set_errno (const call_details &cd)
{
  const region *errno_reg = m_mgr->get_errno_region ();
  conjured_purge p (this, cd.get_ctxt ());
  /* Conjure a fresh unknown int for errno at this call site...  */
  const svalue *new_errno_sval
    = m_mgr->get_or_create_conjured_svalue (integer_type_node,
					    cd.get_call_stmt (),
					    errno_reg, p);
  const svalue *zero
    = m_mgr->get_or_create_int_cst (integer_type_node, 0);
  /* ...constrain it to be strictly positive, then store it.  */
  add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
  set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
}
5567
/* class noop_region_model_context : public region_model_context.  */

/* Deliberately discard the note; this context ignores all events.  */

void
noop_region_model_context::add_note (std::unique_ptr<pending_note>)
{
}

/* Deliberately ignore requests to split the analysis path.  */

void
noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
{
}

/* Deliberately ignore requests to terminate the analysis path.  */

void
noop_region_model_context::terminate_path ()
{
}
5584
808f4dfe 5585/* struct model_merger. */
757bf1df 5586
808f4dfe 5587/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
5588
5589void
808f4dfe 5590model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 5591{
808f4dfe
DM
5592 pp_string (pp, "model A:");
5593 pp_newline (pp);
5594 m_model_a->dump_to_pp (pp, simple, true);
5595 pp_newline (pp);
757bf1df 5596
808f4dfe 5597 pp_string (pp, "model B:");
757bf1df 5598 pp_newline (pp);
808f4dfe 5599 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
5600 pp_newline (pp);
5601
808f4dfe 5602 pp_string (pp, "merged model:");
757bf1df 5603 pp_newline (pp);
808f4dfe 5604 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
5605 pp_newline (pp);
5606}
5607
/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  /* Set up a pretty_printer that writes directly to FP, mirroring the
     colorization settings of the global diagnostic context.  */
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}
5620
/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
5628
f573d351
DM
5629/* Return true if it's OK to merge SVAL with other svalues. */
5630
5631bool
5632model_merger::mergeable_svalue_p (const svalue *sval) const
5633{
5634 if (m_ext_state)
5635 {
5636 /* Reject merging svalues that have non-purgable sm-state,
5637 to avoid falsely reporting memory leaks by merging them
5638 with something else. For example, given a local var "p",
5639 reject the merger of a:
5640 store_a mapping "p" to a malloc-ed ptr
5641 with:
5642 store_b mapping "p" to a NULL ptr. */
5643 if (m_state_a)
5644 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5645 return false;
5646 if (m_state_b)
5647 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5648 return false;
5649 }
5650 return true;
5651}
5652
75038aa6
DM
5653} // namespace ana
5654
/* Dump RMODEL fully to stderr (i.e. without summarization).
   For use from the debugger.  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}
5662
8ca7fa84 5663/* class rejected_op_constraint : public rejected_constraint. */
84fb3546
DM
5664
5665void
8ca7fa84 5666rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
84fb3546
DM
5667{
5668 region_model m (m_model);
5669 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5670 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5671 lhs_sval->dump_to_pp (pp, true);
5672 pp_printf (pp, " %s ", op_symbol_code (m_op));
5673 rhs_sval->dump_to_pp (pp, true);
5674}
5675
8ca7fa84
DM
5676/* class rejected_ranges_constraint : public rejected_constraint. */
5677
5678void
5679rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5680{
5681 region_model m (m_model);
5682 const svalue *sval = m.get_rvalue (m_expr, NULL);
5683 sval->dump_to_pp (pp, true);
5684 pp_string (pp, " in ");
5685 m_ranges->dump_to_pp (pp, true);
5686}
5687
/* class engine.  */

/* engine's ctor.
   SG is the supergraph being analyzed; LOGGER (which may be NULL)
   is forwarded to the region_model_manager.  */

engine::engine (const supergraph *sg, logger *logger)
: m_sg (sg), m_mgr (logger)
{
}
5696
/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  m_mgr.log_stats (logger, true);
}
5704
75038aa6
DM
5705namespace ana {
5706
757bf1df
DM
5707#if CHECKING_P
5708
5709namespace selftest {
5710
8c08c983
DM
5711/* Build a constant tree of the given type from STR. */
5712
5713static tree
5714build_real_cst_from_string (tree type, const char *str)
5715{
5716 REAL_VALUE_TYPE real;
5717 real_from_string (&real, str);
5718 return build_real (type, real);
5719}
5720
5721/* Append various "interesting" constants to OUT (e.g. NaN). */
5722
5723static void
5724append_interesting_constants (auto_vec<tree> *out)
5725{
5726 out->safe_push (build_int_cst (integer_type_node, 0));
5727 out->safe_push (build_int_cst (integer_type_node, 42));
5728 out->safe_push (build_int_cst (unsigned_type_node, 0));
5729 out->safe_push (build_int_cst (unsigned_type_node, 42));
5730 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5731 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5732 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5733 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5734 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5735 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5736 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5737 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5738}
5739
/* Verify that tree_cmp is a well-behaved comparator for qsort, even
   if the underlying constants aren't comparable.  */

static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple.  */
  /* (qsort's checking mode will abort on an inconsistent comparator,
     so merely running these sorts is the test.)  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
	{
	  auto_vec<tree> v (3);
	  v.quick_push (csts[i]);
	  v.quick_push (csts[j]);
	  v.quick_push (csts[k]);
	  v.qsort (tree_cmp);
	}
}
5762
757bf1df
DM
5763/* Implementation detail of the ASSERT_CONDITION_* macros. */
5764
808f4dfe
DM
5765void
5766assert_condition (const location &loc,
5767 region_model &model,
5768 const svalue *lhs, tree_code op, const svalue *rhs,
5769 tristate expected)
5770{
5771 tristate actual = model.eval_condition (lhs, op, rhs);
5772 ASSERT_EQ_AT (loc, actual, expected);
5773}
5774
5775/* Implementation detail of the ASSERT_CONDITION_* macros. */
5776
757bf1df
DM
5777void
5778assert_condition (const location &loc,
5779 region_model &model,
5780 tree lhs, tree_code op, tree rhs,
5781 tristate expected)
5782{
5783 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
5784 ASSERT_EQ_AT (loc, actual, expected);
5785}
5786
90f7c300
DM
5787/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
5788
5789static void
5790assert_dump_tree_eq (const location &loc, tree t, const char *expected)
5791{
5792 auto_fix_quotes sentinel;
5793 pretty_printer pp;
5794 pp_format_decoder (&pp) = default_tree_printer;
5795 dump_tree (&pp, t);
5796 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5797}
5798
5799/* Assert that dump_tree (T) is EXPECTED. */
5800
5801#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
5802 SELFTEST_BEGIN_STMT \
5803 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
5804 SELFTEST_END_STMT
5805
757bf1df
DM
5806/* Implementation detail of ASSERT_DUMP_EQ. */
5807
5808static void
5809assert_dump_eq (const location &loc,
5810 const region_model &model,
5811 bool summarize,
5812 const char *expected)
5813{
5814 auto_fix_quotes sentinel;
5815 pretty_printer pp;
5816 pp_format_decoder (&pp) = default_tree_printer;
808f4dfe
DM
5817
5818 model.dump_to_pp (&pp, summarize, true);
757bf1df
DM
5819 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
5820}
5821
5822/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
5823
5824#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
5825 SELFTEST_BEGIN_STMT \
5826 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
5827 SELFTEST_END_STMT
5828
5829/* Smoketest for region_model::dump_to_pp. */
5830
5831static void
5832test_dump ()
5833{
808f4dfe
DM
5834 region_model_manager mgr;
5835 region_model model (&mgr);
757bf1df
DM
5836
5837 ASSERT_DUMP_EQ (model, false,
808f4dfe
DM
5838 "stack depth: 0\n"
5839 "m_called_unknown_fn: FALSE\n"
5840 "constraint_manager:\n"
5841 " equiv classes:\n"
5842 " constraints:\n");
5843 ASSERT_DUMP_EQ (model, true,
5844 "stack depth: 0\n"
5845 "m_called_unknown_fn: FALSE\n"
5846 "constraint_manager:\n"
757bf1df
DM
5847 " equiv classes:\n"
5848 " constraints:\n");
757bf1df
DM
5849}
5850
884d9141
DM
5851/* Helper function for selftests. Create a struct or union type named NAME,
5852 with the fields given by the FIELD_DECLS in FIELDS.
5853 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
5854 create a UNION_TYPE. */
5855
5856static tree
5857make_test_compound_type (const char *name, bool is_struct,
5858 const auto_vec<tree> *fields)
5859{
5860 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
5861 TYPE_NAME (t) = get_identifier (name);
5862 TYPE_SIZE (t) = 0;
5863
5864 tree fieldlist = NULL;
5865 int i;
5866 tree field;
5867 FOR_EACH_VEC_ELT (*fields, i, field)
5868 {
5869 gcc_assert (TREE_CODE (field) == FIELD_DECL);
5870 DECL_CONTEXT (field) = t;
5871 fieldlist = chainon (field, fieldlist);
5872 }
5873 fieldlist = nreverse (fieldlist);
5874 TYPE_FIELDS (t) = fieldlist;
5875
5876 layout_type (t);
5877 return t;
5878}
5879
a96f1c38
DM
5880/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
5881
5882struct coord_test
5883{
5884 coord_test ()
5885 {
5886 auto_vec<tree> fields;
5887 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
5888 get_identifier ("x"), integer_type_node);
5889 fields.safe_push (m_x_field);
5890 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
5891 get_identifier ("y"), integer_type_node);
5892 fields.safe_push (m_y_field);
5893 m_coord_type = make_test_compound_type ("coord", true, &fields);
5894 }
5895
5896 tree m_x_field;
5897 tree m_y_field;
5898 tree m_coord_type;
5899};
5900
808f4dfe 5901/* Verify usage of a struct. */
884d9141
DM
5902
5903static void
808f4dfe 5904test_struct ()
884d9141 5905{
a96f1c38
DM
5906 coord_test ct;
5907
5908 tree c = build_global_decl ("c", ct.m_coord_type);
5909 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
5910 c, ct.m_x_field, NULL_TREE);
5911 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
5912 c, ct.m_y_field, NULL_TREE);
884d9141
DM
5913
5914 tree int_17 = build_int_cst (integer_type_node, 17);
5915 tree int_m3 = build_int_cst (integer_type_node, -3);
5916
808f4dfe
DM
5917 region_model_manager mgr;
5918 region_model model (&mgr);
884d9141
DM
5919 model.set_value (c_x, int_17, NULL);
5920 model.set_value (c_y, int_m3, NULL);
5921
808f4dfe
DM
5922 /* Verify get_offset for "c.x". */
5923 {
5924 const region *c_x_reg = model.get_lvalue (c_x, NULL);
7a6564c9 5925 region_offset offset = c_x_reg->get_offset (&mgr);
808f4dfe
DM
5926 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
5927 ASSERT_EQ (offset.get_bit_offset (), 0);
5928 }
5929
5930 /* Verify get_offset for "c.y". */
5931 {
5932 const region *c_y_reg = model.get_lvalue (c_y, NULL);
7a6564c9 5933 region_offset offset = c_y_reg->get_offset (&mgr);
808f4dfe
DM
5934 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
5935 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
5936 }
884d9141
DM
5937}
5938
808f4dfe 5939/* Verify usage of an array element. */
884d9141
DM
5940
5941static void
808f4dfe 5942test_array_1 ()
884d9141
DM
5943{
5944 tree tlen = size_int (10);
5945 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
5946
5947 tree a = build_global_decl ("a", arr_type);
5948
808f4dfe
DM
5949 region_model_manager mgr;
5950 region_model model (&mgr);
884d9141
DM
5951 tree int_0 = build_int_cst (integer_type_node, 0);
5952 tree a_0 = build4 (ARRAY_REF, char_type_node,
5953 a, int_0, NULL_TREE, NULL_TREE);
5954 tree char_A = build_int_cst (char_type_node, 'A');
5955 model.set_value (a_0, char_A, NULL);
884d9141
DM
5956}
5957
90f7c300
DM
5958/* Verify that region_model::get_representative_tree works as expected. */
5959
5960static void
5961test_get_representative_tree ()
5962{
808f4dfe
DM
5963 region_model_manager mgr;
5964
90f7c300
DM
5965 /* STRING_CST. */
5966 {
5967 tree string_cst = build_string (4, "foo");
808f4dfe
DM
5968 region_model m (&mgr);
5969 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
5970 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
5971 ASSERT_EQ (rep, string_cst);
5972 }
5973
5974 /* String literal. */
5975 {
5976 tree string_cst_ptr = build_string_literal (4, "foo");
808f4dfe
DM
5977 region_model m (&mgr);
5978 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
5979 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
5980 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
5981 }
808f4dfe
DM
5982
5983 /* Value of an element within an array. */
5984 {
5985 tree tlen = size_int (10);
5986 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
5987 tree a = build_global_decl ("a", arr_type);
5988 placeholder_svalue test_sval (char_type_node, "test value");
5989
5990 /* Value of a[3]. */
5991 {
5992 test_region_model_context ctxt;
5993 region_model model (&mgr);
5994 tree int_3 = build_int_cst (integer_type_node, 3);
5995 tree a_3 = build4 (ARRAY_REF, char_type_node,
5996 a, int_3, NULL_TREE, NULL_TREE);
5997 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
5998 model.set_value (a_3_reg, &test_sval, &ctxt);
5999 tree rep = model.get_representative_tree (&test_sval);
6000 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6001 }
6002
6003 /* Value of a[0]. */
6004 {
6005 test_region_model_context ctxt;
6006 region_model model (&mgr);
6007 tree idx = build_int_cst (integer_type_node, 0);
6008 tree a_0 = build4 (ARRAY_REF, char_type_node,
6009 a, idx, NULL_TREE, NULL_TREE);
6010 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6011 model.set_value (a_0_reg, &test_sval, &ctxt);
6012 tree rep = model.get_representative_tree (&test_sval);
6013 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6014 }
6015 }
6016
6017 /* Value of a field within a struct. */
6018 {
6019 coord_test ct;
6020
6021 tree c = build_global_decl ("c", ct.m_coord_type);
6022 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6023 c, ct.m_x_field, NULL_TREE);
6024 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6025 c, ct.m_y_field, NULL_TREE);
6026
6027 test_region_model_context ctxt;
6028
6029 /* Value of initial field. */
6030 {
6031 region_model m (&mgr);
6032 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6033 placeholder_svalue test_sval_x (integer_type_node, "test x val");
6034 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6035 tree rep = m.get_representative_tree (&test_sval_x);
6036 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6037 }
6038
6039 /* Value of non-initial field. */
6040 {
6041 region_model m (&mgr);
6042 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6043 placeholder_svalue test_sval_y (integer_type_node, "test y val");
6044 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6045 tree rep = m.get_representative_tree (&test_sval_y);
6046 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6047 }
6048 }
90f7c300
DM
6049}
6050
757bf1df 6051/* Verify that calling region_model::get_rvalue repeatedly on the same
808f4dfe 6052 tree constant retrieves the same svalue *. */
757bf1df
DM
6053
6054static void
6055test_unique_constants ()
6056{
6057 tree int_0 = build_int_cst (integer_type_node, 0);
6058 tree int_42 = build_int_cst (integer_type_node, 42);
6059
6060 test_region_model_context ctxt;
808f4dfe
DM
6061 region_model_manager mgr;
6062 region_model model (&mgr);
757bf1df
DM
6063 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
6064 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
6065 model.get_rvalue (int_42, &ctxt));
6066 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
6067 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
757bf1df 6068
808f4dfe
DM
6069 /* A "(const int)42" will be a different tree from "(int)42)"... */
6070 tree const_int_type_node
6071 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
6072 tree const_int_42 = build_int_cst (const_int_type_node, 42);
6073 ASSERT_NE (int_42, const_int_42);
6074 /* It should have a different const_svalue. */
6075 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
6076 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
6077 ASSERT_NE (int_42_sval, const_int_42_sval);
6078 /* But they should compare as equal. */
6079 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
6080 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
757bf1df
DM
6081}
6082
808f4dfe
DM
6083/* Verify that each type gets its own singleton unknown_svalue within a
6084 region_model_manager, and that NULL_TREE gets its own singleton. */
757bf1df
DM
6085
6086static void
808f4dfe 6087test_unique_unknowns ()
757bf1df 6088{
808f4dfe
DM
6089 region_model_manager mgr;
6090 const svalue *unknown_int
6091 = mgr.get_or_create_unknown_svalue (integer_type_node);
6092 /* Repeated calls with the same type should get the same "unknown"
6093 svalue. */
6094 const svalue *unknown_int_2
6095 = mgr.get_or_create_unknown_svalue (integer_type_node);
6096 ASSERT_EQ (unknown_int, unknown_int_2);
757bf1df 6097
808f4dfe
DM
6098 /* Different types (or the NULL type) should have different
6099 unknown_svalues. */
6100 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
6101 ASSERT_NE (unknown_NULL_type, unknown_int);
757bf1df 6102
808f4dfe
DM
6103 /* Repeated calls with NULL for the type should get the same "unknown"
6104 svalue. */
6105 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
6106 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
757bf1df
DM
6107}
6108
808f4dfe 6109/* Verify that initial_svalue are handled as expected. */
757bf1df 6110
808f4dfe
DM
6111static void
6112test_initial_svalue_folding ()
757bf1df 6113{
808f4dfe
DM
6114 region_model_manager mgr;
6115 tree x = build_global_decl ("x", integer_type_node);
6116 tree y = build_global_decl ("y", integer_type_node);
757bf1df 6117
808f4dfe
DM
6118 test_region_model_context ctxt;
6119 region_model model (&mgr);
6120 const svalue *x_init = model.get_rvalue (x, &ctxt);
6121 const svalue *y_init = model.get_rvalue (y, &ctxt);
6122 ASSERT_NE (x_init, y_init);
6123 const region *x_reg = model.get_lvalue (x, &ctxt);
6124 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
757bf1df 6125
808f4dfe 6126}
757bf1df 6127
808f4dfe 6128/* Verify that unary ops are folded as expected. */
757bf1df
DM
6129
6130static void
808f4dfe 6131test_unaryop_svalue_folding ()
757bf1df 6132{
808f4dfe 6133 region_model_manager mgr;
757bf1df
DM
6134 tree x = build_global_decl ("x", integer_type_node);
6135 tree y = build_global_decl ("y", integer_type_node);
6136
808f4dfe
DM
6137 test_region_model_context ctxt;
6138 region_model model (&mgr);
6139 const svalue *x_init = model.get_rvalue (x, &ctxt);
6140 const svalue *y_init = model.get_rvalue (y, &ctxt);
6141 const region *x_reg = model.get_lvalue (x, &ctxt);
6142 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
6143
6144 /* "(int)x" -> "x". */
6145 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
6146
6147 /* "(void *)x" -> something other than "x". */
6148 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
6149
6150 /* "!(x == y)" -> "x != y". */
6151 ASSERT_EQ (mgr.get_or_create_unaryop
6152 (boolean_type_node, TRUTH_NOT_EXPR,
6153 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
6154 x_init, y_init)),
6155 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
6156 x_init, y_init));
6157 /* "!(x > y)" -> "x <= y". */
6158 ASSERT_EQ (mgr.get_or_create_unaryop
6159 (boolean_type_node, TRUTH_NOT_EXPR,
6160 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
6161 x_init, y_init)),
6162 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
6163 x_init, y_init));
6164}
6165
6166/* Verify that binops on constant svalues are folded. */
757bf1df 6167
808f4dfe
DM
6168static void
6169test_binop_svalue_folding ()
6170{
6171#define NUM_CSTS 10
6172 tree cst_int[NUM_CSTS];
6173 region_model_manager mgr;
6174 const svalue *cst_sval[NUM_CSTS];
6175 for (int i = 0; i < NUM_CSTS; i++)
6176 {
6177 cst_int[i] = build_int_cst (integer_type_node, i);
6178 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
6179 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
6180 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
6181 }
757bf1df 6182
808f4dfe
DM
6183 for (int i = 0; i < NUM_CSTS; i++)
6184 for (int j = 0; j < NUM_CSTS; j++)
6185 {
6186 if (i != j)
6187 ASSERT_NE (cst_sval[i], cst_sval[j]);
6188 if (i + j < NUM_CSTS)
6189 {
6190 const svalue *sum
6191 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6192 cst_sval[i], cst_sval[j]);
6193 ASSERT_EQ (sum, cst_sval[i + j]);
6194 }
6195 if (i - j >= 0)
6196 {
6197 const svalue *difference
6198 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
6199 cst_sval[i], cst_sval[j]);
6200 ASSERT_EQ (difference, cst_sval[i - j]);
6201 }
6202 if (i * j < NUM_CSTS)
6203 {
6204 const svalue *product
6205 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6206 cst_sval[i], cst_sval[j]);
6207 ASSERT_EQ (product, cst_sval[i * j]);
6208 }
6209 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
6210 cst_sval[i], cst_sval[j]);
6211 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
6212 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
6213 cst_sval[i], cst_sval[j]);
6214 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
6215 // etc
6216 }
757bf1df 6217
808f4dfe 6218 tree x = build_global_decl ("x", integer_type_node);
757bf1df 6219
808f4dfe
DM
6220 test_region_model_context ctxt;
6221 region_model model (&mgr);
6222 const svalue *x_init = model.get_rvalue (x, &ctxt);
6223
6224 /* PLUS_EXPR folding. */
6225 const svalue *x_init_plus_zero
6226 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6227 x_init, cst_sval[0]);
6228 ASSERT_EQ (x_init_plus_zero, x_init);
6229 const svalue *zero_plus_x_init
6230 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6231 cst_sval[0], x_init);
6232 ASSERT_EQ (zero_plus_x_init, x_init);
6233
6234 /* MULT_EXPR folding. */
6235 const svalue *x_init_times_zero
6236 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6237 x_init, cst_sval[0]);
6238 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
6239 const svalue *zero_times_x_init
6240 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6241 cst_sval[0], x_init);
6242 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
6243
6244 const svalue *x_init_times_one
6245 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6246 x_init, cst_sval[1]);
6247 ASSERT_EQ (x_init_times_one, x_init);
6248 const svalue *one_times_x_init
6249 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6250 cst_sval[1], x_init);
6251 ASSERT_EQ (one_times_x_init, x_init);
6252
6253 // etc
6254 // TODO: do we want to use the match-and-simplify DSL for this?
6255
6256 /* Verify that binops put any constants on the RHS. */
6257 const svalue *four_times_x_init
6258 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6259 cst_sval[4], x_init);
6260 const svalue *x_init_times_four
6261 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
6262 x_init, cst_sval[4]);
6263 ASSERT_EQ (four_times_x_init, x_init_times_four);
6264 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
6265 ASSERT_EQ (binop->get_op (), MULT_EXPR);
6266 ASSERT_EQ (binop->get_arg0 (), x_init);
6267 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
6268
6269 /* Verify that ((x + 1) + 1) == (x + 2). */
6270 const svalue *x_init_plus_one
6271 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6272 x_init, cst_sval[1]);
6273 const svalue *x_init_plus_two
6274 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6275 x_init, cst_sval[2]);
6276 const svalue *x_init_plus_one_plus_one
6277 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
6278 x_init_plus_one, cst_sval[1]);
6279 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
4f34f8cc
DM
6280
6281 /* Verify various binops on booleans. */
6282 {
6283 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
6284 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
6285 const svalue *sval_unknown
6286 = mgr.get_or_create_unknown_svalue (boolean_type_node);
6287 const placeholder_svalue sval_placeholder (boolean_type_node, "v");
6288 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
6289 {
6290 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6291 sval_true, sval_unknown),
6292 sval_true);
6293 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6294 sval_false, sval_unknown),
6295 sval_unknown);
6296 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6297 sval_false, &sval_placeholder),
6298 &sval_placeholder);
6299 }
6300 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
6301 {
6302 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6303 sval_false, sval_unknown),
6304 sval_false);
6305 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6306 sval_true, sval_unknown),
6307 sval_unknown);
6308 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
6309 sval_true, &sval_placeholder),
6310 &sval_placeholder);
6311 }
6312 }
808f4dfe
DM
6313}
6314
6315/* Verify that sub_svalues are folded as expected. */
757bf1df 6316
808f4dfe
DM
6317static void
6318test_sub_svalue_folding ()
6319{
6320 coord_test ct;
6321 tree c = build_global_decl ("c", ct.m_coord_type);
6322 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6323 c, ct.m_x_field, NULL_TREE);
757bf1df 6324
808f4dfe
DM
6325 region_model_manager mgr;
6326 region_model model (&mgr);
6327 test_region_model_context ctxt;
6328 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 6329
808f4dfe
DM
6330 /* Verify that sub_svalue of "unknown" simply
6331 yields an unknown. */
757bf1df 6332
808f4dfe
DM
6333 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
6334 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
6335 unknown, c_x_reg);
6336 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
6337 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
757bf1df
DM
6338}
6339
f09b9955
DM
6340/* Get BIT within VAL as a symbolic value within MGR. */
6341
6342static const svalue *
6343get_bit (region_model_manager *mgr,
6344 bit_offset_t bit,
6345 unsigned HOST_WIDE_INT val)
6346{
6347 const svalue *inner_svalue
6348 = mgr->get_or_create_int_cst (unsigned_type_node, val);
6349 return mgr->get_or_create_bits_within (boolean_type_node,
6350 bit_range (bit, 1),
6351 inner_svalue);
6352}
6353
6354/* Verify that bits_within_svalues are folded as expected. */
6355
6356static void
6357test_bits_within_svalue_folding ()
6358{
6359 region_model_manager mgr;
6360
6361 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
6362 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
6363
6364 {
6365 const unsigned val = 0x0000;
6366 for (unsigned bit = 0; bit < 16; bit++)
6367 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6368 }
6369
6370 {
6371 const unsigned val = 0x0001;
6372 ASSERT_EQ (get_bit (&mgr, 0, val), one);
6373 for (unsigned bit = 1; bit < 16; bit++)
6374 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6375 }
6376
6377 {
6378 const unsigned val = 0x8000;
6379 for (unsigned bit = 0; bit < 15; bit++)
6380 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
6381 ASSERT_EQ (get_bit (&mgr, 15, val), one);
6382 }
6383
6384 {
6385 const unsigned val = 0xFFFF;
6386 for (unsigned bit = 0; bit < 16; bit++)
6387 ASSERT_EQ (get_bit (&mgr, bit, val), one);
6388 }
6389}
6390
808f4dfe 6391/* Test that region::descendent_of_p works as expected. */
757bf1df
DM
6392
6393static void
808f4dfe 6394test_descendent_of_p ()
757bf1df 6395{
808f4dfe
DM
6396 region_model_manager mgr;
6397 const region *stack = mgr.get_stack_region ();
6398 const region *heap = mgr.get_heap_region ();
6399 const region *code = mgr.get_code_region ();
6400 const region *globals = mgr.get_globals_region ();
757bf1df 6401
808f4dfe
DM
6402 /* descendent_of_p should return true when used on the region itself. */
6403 ASSERT_TRUE (stack->descendent_of_p (stack));
6404 ASSERT_FALSE (stack->descendent_of_p (heap));
6405 ASSERT_FALSE (stack->descendent_of_p (code));
6406 ASSERT_FALSE (stack->descendent_of_p (globals));
757bf1df 6407
808f4dfe
DM
6408 tree x = build_global_decl ("x", integer_type_node);
6409 const region *x_reg = mgr.get_region_for_global (x);
6410 ASSERT_TRUE (x_reg->descendent_of_p (globals));
757bf1df 6411
808f4dfe
DM
6412 /* A cast_region should be a descendent of the original region. */
6413 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
6414 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
757bf1df
DM
6415}
6416
391512ad
DM
6417/* Verify that bit_range_region works as expected. */
6418
6419static void
6420test_bit_range_regions ()
6421{
6422 tree x = build_global_decl ("x", integer_type_node);
6423 region_model_manager mgr;
6424 const region *x_reg = mgr.get_region_for_global (x);
6425 const region *byte0
6426 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
6427 const region *byte1
6428 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
6429 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
6430 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
6431 ASSERT_NE (byte0, byte1);
6432}
6433
757bf1df
DM
6434/* Verify that simple assignments work as expected. */
6435
6436static void
6437test_assignment ()
6438{
6439 tree int_0 = build_int_cst (integer_type_node, 0);
6440 tree x = build_global_decl ("x", integer_type_node);
6441 tree y = build_global_decl ("y", integer_type_node);
6442
6443 /* "x == 0", then use of y, then "y = 0;". */
808f4dfe
DM
6444 region_model_manager mgr;
6445 region_model model (&mgr);
757bf1df
DM
6446 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
6447 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
6448 model.set_value (model.get_lvalue (y, NULL),
6449 model.get_rvalue (int_0, NULL),
6450 NULL);
6451 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
6452 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
757bf1df
DM
6453}
6454
a96f1c38
DM
6455/* Verify that compound assignments work as expected. */
6456
6457static void
6458test_compound_assignment ()
6459{
6460 coord_test ct;
6461
6462 tree c = build_global_decl ("c", ct.m_coord_type);
6463 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6464 c, ct.m_x_field, NULL_TREE);
6465 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6466 c, ct.m_y_field, NULL_TREE);
6467 tree d = build_global_decl ("d", ct.m_coord_type);
6468 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6469 d, ct.m_x_field, NULL_TREE);
6470 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6471 d, ct.m_y_field, NULL_TREE);
6472
6473 tree int_17 = build_int_cst (integer_type_node, 17);
6474 tree int_m3 = build_int_cst (integer_type_node, -3);
6475
808f4dfe
DM
6476 region_model_manager mgr;
6477 region_model model (&mgr);
a96f1c38
DM
6478 model.set_value (c_x, int_17, NULL);
6479 model.set_value (c_y, int_m3, NULL);
6480
a96f1c38 6481 /* Copy c to d. */
13ad6d9f
DM
6482 const svalue *sval = model.get_rvalue (c, NULL);
6483 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
6484
a96f1c38
DM
6485 /* Check that the fields have the same svalues. */
6486 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
6487 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
6488}
6489
757bf1df
DM
6490/* Verify the details of pushing and popping stack frames. */
6491
6492static void
6493test_stack_frames ()
6494{
6495 tree int_42 = build_int_cst (integer_type_node, 42);
6496 tree int_10 = build_int_cst (integer_type_node, 10);
6497 tree int_5 = build_int_cst (integer_type_node, 5);
6498 tree int_0 = build_int_cst (integer_type_node, 0);
6499
6500 auto_vec <tree> param_types;
6501 tree parent_fndecl = make_fndecl (integer_type_node,
6502 "parent_fn",
6503 param_types);
6504 allocate_struct_function (parent_fndecl, true);
6505
6506 tree child_fndecl = make_fndecl (integer_type_node,
6507 "child_fn",
6508 param_types);
6509 allocate_struct_function (child_fndecl, true);
6510
6511 /* "a" and "b" in the parent frame. */
6512 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6513 get_identifier ("a"),
6514 integer_type_node);
4cebae09 6515 DECL_CONTEXT (a) = parent_fndecl;
757bf1df
DM
6516 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6517 get_identifier ("b"),
6518 integer_type_node);
4cebae09 6519 DECL_CONTEXT (b) = parent_fndecl;
757bf1df
DM
6520 /* "x" and "y" in a child frame. */
6521 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6522 get_identifier ("x"),
6523 integer_type_node);
4cebae09 6524 DECL_CONTEXT (x) = child_fndecl;
757bf1df
DM
6525 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6526 get_identifier ("y"),
6527 integer_type_node);
4cebae09 6528 DECL_CONTEXT (y) = child_fndecl;
757bf1df
DM
6529
6530 /* "p" global. */
6531 tree p = build_global_decl ("p", ptr_type_node);
6532
6533 /* "q" global. */
6534 tree q = build_global_decl ("q", ptr_type_node);
6535
808f4dfe 6536 region_model_manager mgr;
757bf1df 6537 test_region_model_context ctxt;
808f4dfe 6538 region_model model (&mgr);
757bf1df
DM
6539
6540 /* Push stack frame for "parent_fn". */
808f4dfe
DM
6541 const region *parent_frame_reg
6542 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6543 NULL, &ctxt);
6544 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6545 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6546 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6547 model.set_value (a_in_parent_reg,
6548 model.get_rvalue (int_42, &ctxt),
6549 &ctxt);
6550 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6551
757bf1df
DM
6552 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6553 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6554 tristate (tristate::TS_TRUE));
6555
6556 /* Push stack frame for "child_fn". */
808f4dfe 6557 const region *child_frame_reg
757bf1df 6558 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
6559 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6560 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6561 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6562 model.set_value (x_in_child_reg,
6563 model.get_rvalue (int_0, &ctxt),
6564 &ctxt);
6565 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6566
757bf1df
DM
6567 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6568 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6569 tristate (tristate::TS_TRUE));
6570
6571 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
6572 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6573 model.set_value (p_in_globals_reg,
6574 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 6575 &ctxt);
808f4dfe 6576 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
6577
6578 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
6579 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6580 model.set_value (q_in_globals_reg,
6581 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
6582 &ctxt);
6583
808f4dfe
DM
6584 /* Test region::descendent_of_p. */
6585 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6586 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6587 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
6588
6589 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
6590 model.pop_frame (NULL, NULL, &ctxt);
6591 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6592 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
6593
6594 /* Verify that p (which was pointing at the local "x" in the popped
6595 frame) has been poisoned. */
33255ad3 6596 const svalue *new_p_sval = model.get_rvalue (p, NULL);
757bf1df
DM
6597 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6598 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6599 POISON_KIND_POPPED_STACK);
6600
6601 /* Verify that q still points to p, in spite of the region
6602 renumbering. */
808f4dfe 6603 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 6604 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 6605 ASSERT_EQ (new_q_sval->maybe_get_region (),
757bf1df
DM
6606 model.get_lvalue (p, &ctxt));
6607
6608 /* Verify that top of stack has been updated. */
808f4dfe 6609 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
6610
6611 /* Verify locals in parent frame. */
6612 /* Verify "a" still has its value. */
808f4dfe 6613 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
6614 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6615 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6616 int_42);
6617 /* Verify "b" still has its constraint. */
6618 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6619 tristate (tristate::TS_TRUE));
6620}
6621
/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack.  */

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.
     Remember each frame's region for "n" and its initial svalue.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	/* NOTE: path_var depths in the results are 1-based here
	   (depth + 1), whereas the lookups above used 0-based depths.  */
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
    }
}
6683
/* Ensure that region_model::operator== works as expected.  */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}
6717
/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal.  */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* model0: assign x first, then y.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
		    model0.get_rvalue (int_42, NULL),
		    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
		    model0.get_rvalue (int_113, NULL),
		    NULL);

  /* model1: same bindings, but established in the opposite order.  */
  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
		    model1.get_rvalue (int_113, NULL),
		    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
		    model1.get_rvalue (int_42, NULL),
		    NULL);

  /* Binding order should not affect equality.  */
  ASSERT_EQ (model0, model1);
}
6751
/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization.  */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  /* Same constraints, added in the opposite order.  */
  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  /* Canonicalization should erase the difference in insertion order.  */
  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}
6779
8c08c983
DM
/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  /* Pull every "interesting" constant into the model, then check that
     canonicalization doesn't crash on any of them (e.g. NaN, which is
     unordered with respect to itself).  */
  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}
6797
757bf1df
DM
/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.  */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}
6828
/* Verify that we can merge region_model instances.  */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    /* The merged model should differ from both inputs, since neither
       constraint can survive.  */
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

  /* Contradictory constraints on "x" (x == 42 vs x != 42) should still
     be mergeable.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* ...likewise when one side has an additional constraint.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}
7171
/* Verify that constraints are correctly merged when merging region_model
   instances.  */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  /* Constraints common to both inputs should survive the merge...  */
  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* ...whereas constraints present in only one input should be dropped.  */
  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}
7216
808f4dfe
DM
/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  /* Build WIDENING(0, 1): a value that was 0 on the first iteration and
     1 on the next, hence known to start at 0 and ascend.  */
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);

  /* An ascending value starting at 0 is known to be >= 0, so
     comparisons against values below 0 are decidable; comparisons
     against values the widened range may reach are unknown.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}
7293
/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
         [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
        F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}
7409
6969ac30
DM
/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  /* Simulate "p = malloc (<unknown>); q = (char *)p;".  */
  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  /* Initially nothing is known about the nullness of either pointer.  */
  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  /* Constraining "p" to be non-NULL should also make "q" (a cast of
     the same pointer) known to be non-NULL.  */
  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}
7443
808f4dfe
DM
/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ..and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i": it should be at bit offset 0 within its
     own base region.  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}
7485
7486static void
7487test_array_2 ()
7488{
7489 /* "int arr[10];" */
7490 tree tlen = size_int (10);
7491 tree arr_type
7492 = build_array_type (integer_type_node, build_index_type (tlen));
7493 tree arr = build_global_decl ("arr", arr_type);
7494
7495 /* "int i;" */
7496 tree i = build_global_decl ("i", integer_type_node);
7497
7498 tree int_0 = build_int_cst (integer_type_node, 0);
7499 tree int_1 = build_int_cst (integer_type_node, 1);
7500
7501 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
7502 arr, int_0, NULL_TREE, NULL_TREE);
7503 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
7504 arr, int_1, NULL_TREE, NULL_TREE);
7505 tree arr_i = build4 (ARRAY_REF, integer_type_node,
7506 arr, i, NULL_TREE, NULL_TREE);
7507
7508 tree int_17 = build_int_cst (integer_type_node, 17);
7509 tree int_42 = build_int_cst (integer_type_node, 42);
7510 tree int_m3 = build_int_cst (integer_type_node, -3);
7511
7512 region_model_manager mgr;
7513 region_model model (&mgr);
7514 /* "arr[0] = 17;". */
7515 model.set_value (arr_0, int_17, NULL);
7516 /* "arr[1] = -3;". */
7517 model.set_value (arr_1, int_m3, NULL);
7518
7519 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
7520 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
7521
7522 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
7523 model.set_value (arr_1, int_42, NULL);
7524 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
7525
7526 /* Verify get_offset for "arr[0]". */
7527 {
7528 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
7a6564c9 7529 region_offset offset = arr_0_reg->get_offset (&mgr);
808f4dfe
DM
7530 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7531 ASSERT_EQ (offset.get_bit_offset (), 0);
7532 }
7533
7534 /* Verify get_offset for "arr[1]". */
7535 {
7536 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
7a6564c9 7537 region_offset offset = arr_1_reg->get_offset (&mgr);
808f4dfe
DM
7538 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7539 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
7540 }
7541
7a6564c9
TL
7542 /* Verify get_offset for "arr[i]". */
7543 {
7544 const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
7545 region_offset offset = arr_i_reg->get_offset (&mgr);
7546 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7547 ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
7548 }
7549
808f4dfe
DM
7550 /* "arr[i] = i;" - this should remove the earlier bindings. */
7551 model.set_value (arr_i, i, NULL);
7552 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
7553 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
7554
7555 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
7556 model.set_value (arr_0, int_17, NULL);
7557 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
7558 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
7559}
7560
7561/* Smoketest of dereferencing a pointer via MEM_REF. */
7562
7563static void
7564test_mem_ref ()
7565{
7566 /*
7567 x = 17;
7568 p = &x;
7569 *p;
7570 */
7571 tree x = build_global_decl ("x", integer_type_node);
7572 tree int_star = build_pointer_type (integer_type_node);
7573 tree p = build_global_decl ("p", int_star);
7574
7575 tree int_17 = build_int_cst (integer_type_node, 17);
7576 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
7577 tree offset_0 = build_int_cst (integer_type_node, 0);
7578 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
7579
7580 region_model_manager mgr;
7581 region_model model (&mgr);
7582
7583 /* "x = 17;". */
7584 model.set_value (x, int_17, NULL);
7585
7586 /* "p = &x;". */
7587 model.set_value (p, addr_of_x, NULL);
7588
7589 const svalue *sval = model.get_rvalue (star_p, NULL);
7590 ASSERT_EQ (sval->maybe_get_constant (), int_17);
7591}
7592
7593/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
7594 Analogous to this code:
7595 void test_6 (int a[10])
7596 {
7597 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7598 a[3] = 42;
7599 __analyzer_eval (a[3] == 42); [should be TRUE]
7600 }
7601 from data-model-1.c, which looks like this at the gimple level:
7602 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7603 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
7604 int _2 = *_1; # MEM_REF
7605 _Bool _3 = _2 == 42;
7606 int _4 = (int) _3;
7607 __analyzer_eval (_4);
7608
7609 # a[3] = 42;
7610 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
7611 *_5 = 42; # MEM_REF
7612
7613 # __analyzer_eval (a[3] == 42); [should be TRUE]
7614 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
7615 int _7 = *_6; # MEM_REF
7616 _Bool _8 = _7 == 42;
7617 int _9 = (int) _8;
7618 __analyzer_eval (_9); */
7619
7620static void
7621test_POINTER_PLUS_EXPR_then_MEM_REF ()
7622{
7623 tree int_star = build_pointer_type (integer_type_node);
7624 tree a = build_global_decl ("a", int_star);
7625 tree offset_12 = build_int_cst (size_type_node, 12);
7626 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
7627 tree offset_0 = build_int_cst (integer_type_node, 0);
7628 tree mem_ref = build2 (MEM_REF, integer_type_node,
7629 pointer_plus_expr, offset_0);
7630 region_model_manager mgr;
7631 region_model m (&mgr);
7632
7633 tree int_42 = build_int_cst (integer_type_node, 42);
7634 m.set_value (mem_ref, int_42, NULL);
7635 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
7636}
7637
7638/* Verify that malloc works. */
7639
7640static void
7641test_malloc ()
7642{
7643 tree int_star = build_pointer_type (integer_type_node);
7644 tree p = build_global_decl ("p", int_star);
7645 tree n = build_global_decl ("n", integer_type_node);
7646 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
7647 n, build_int_cst (size_type_node, 4));
7648
7649 region_model_manager mgr;
7650 test_region_model_context ctxt;
7651 region_model model (&mgr);
7652
7653 /* "p = malloc (n * 4);". */
7654 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
ce917b04
DM
7655 const region *reg
7656 = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe
DM
7657 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
7658 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 7659 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
7660}
7661
7662/* Verify that alloca works. */
7663
7664static void
7665test_alloca ()
7666{
7667 auto_vec <tree> param_types;
7668 tree fndecl = make_fndecl (integer_type_node,
7669 "test_fn",
7670 param_types);
7671 allocate_struct_function (fndecl, true);
7672
7673
7674 tree int_star = build_pointer_type (integer_type_node);
7675 tree p = build_global_decl ("p", int_star);
7676 tree n = build_global_decl ("n", integer_type_node);
7677 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
7678 n, build_int_cst (size_type_node, 4));
7679
7680 region_model_manager mgr;
7681 test_region_model_context ctxt;
7682 region_model model (&mgr);
7683
7684 /* Push stack frame. */
7685 const region *frame_reg
7686 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
7687 NULL, &ctxt);
7688 /* "p = alloca (n * 4);". */
7689 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 7690 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
808f4dfe
DM
7691 ASSERT_EQ (reg->get_parent_region (), frame_reg);
7692 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
7693 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 7694 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
7695
7696 /* Verify that the pointers to the alloca region are replaced by
7697 poisoned values when the frame is popped. */
7698 model.pop_frame (NULL, NULL, &ctxt);
33255ad3 7699 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
808f4dfe
DM
7700}
7701
71fc4655
DM
7702/* Verify that svalue::involves_p works. */
7703
7704static void
7705test_involves_p ()
7706{
7707 region_model_manager mgr;
7708 tree int_star = build_pointer_type (integer_type_node);
7709 tree p = build_global_decl ("p", int_star);
7710 tree q = build_global_decl ("q", int_star);
7711
7712 test_region_model_context ctxt;
7713 region_model model (&mgr);
7714 const svalue *p_init = model.get_rvalue (p, &ctxt);
7715 const svalue *q_init = model.get_rvalue (q, &ctxt);
7716
7717 ASSERT_TRUE (p_init->involves_p (p_init));
7718 ASSERT_FALSE (p_init->involves_p (q_init));
7719
7720 const region *star_p_reg = mgr.get_symbolic_region (p_init);
7721 const region *star_q_reg = mgr.get_symbolic_region (q_init);
7722
7723 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
7724 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
7725
7726 ASSERT_TRUE (init_star_p->involves_p (p_init));
7727 ASSERT_FALSE (p_init->involves_p (init_star_p));
7728 ASSERT_FALSE (init_star_p->involves_p (q_init));
7729 ASSERT_TRUE (init_star_q->involves_p (q_init));
7730 ASSERT_FALSE (init_star_q->involves_p (p_init));
7731}
7732
757bf1df
DM
7733/* Run all of the selftests within this file. */
7734
7735void
7736analyzer_region_model_cc_tests ()
7737{
8c08c983 7738 test_tree_cmp_on_constants ();
757bf1df 7739 test_dump ();
808f4dfe
DM
7740 test_struct ();
7741 test_array_1 ();
90f7c300 7742 test_get_representative_tree ();
757bf1df 7743 test_unique_constants ();
808f4dfe
DM
7744 test_unique_unknowns ();
7745 test_initial_svalue_folding ();
7746 test_unaryop_svalue_folding ();
7747 test_binop_svalue_folding ();
7748 test_sub_svalue_folding ();
f09b9955 7749 test_bits_within_svalue_folding ();
808f4dfe 7750 test_descendent_of_p ();
391512ad 7751 test_bit_range_regions ();
757bf1df 7752 test_assignment ();
a96f1c38 7753 test_compound_assignment ();
757bf1df
DM
7754 test_stack_frames ();
7755 test_get_representative_path_var ();
808f4dfe 7756 test_equality_1 ();
757bf1df
DM
7757 test_canonicalization_2 ();
7758 test_canonicalization_3 ();
8c08c983 7759 test_canonicalization_4 ();
757bf1df
DM
7760 test_state_merging ();
7761 test_constraint_merging ();
808f4dfe
DM
7762 test_widening_constraints ();
7763 test_iteration_1 ();
6969ac30 7764 test_malloc_constraints ();
808f4dfe
DM
7765 test_var ();
7766 test_array_2 ();
7767 test_mem_ref ();
7768 test_POINTER_PLUS_EXPR_then_MEM_REF ();
7769 test_malloc ();
7770 test_alloca ();
71fc4655 7771 test_involves_p ();
757bf1df
DM
7772}
7773
7774} // namespace selftest
7775
7776#endif /* CHECKING_P */
7777
75038aa6
DM
7778} // namespace ana
7779
757bf1df 7780#endif /* #if ENABLE_ANALYZER */