]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
doc: Reword how to get possible values of a parameter
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df 1/* Classes for modeling the state of memory.
83ffe9cd 2 Copyright (C) 2019-2023 Free Software Foundation, Inc.
757bf1df
DM
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
c81b60b8 22#define INCLUDE_MEMORY
757bf1df
DM
23#include "system.h"
24#include "coretypes.h"
6341f14e 25#include "make-unique.h"
757bf1df
DM
26#include "tree.h"
27#include "function.h"
28#include "basic-block.h"
29#include "gimple.h"
30#include "gimple-iterator.h"
7892ff37 31#include "diagnostic-core.h"
757bf1df
DM
32#include "graphviz.h"
33#include "options.h"
34#include "cgraph.h"
35#include "tree-dfa.h"
36#include "stringpool.h"
37#include "convert.h"
38#include "target.h"
39#include "fold-const.h"
40#include "tree-pretty-print.h"
41#include "diagnostic-color.h"
42#include "diagnostic-metadata.h"
ef7827b0 43#include "bitmap.h"
757bf1df 44#include "selftest.h"
757bf1df
DM
45#include "analyzer/analyzer.h"
46#include "analyzer/analyzer-logging.h"
47#include "ordered-hash-map.h"
48#include "options.h"
49#include "cgraph.h"
50#include "cfg.h"
757bf1df
DM
51#include "analyzer/supergraph.h"
52#include "sbitmap.h"
808f4dfe
DM
53#include "analyzer/call-string.h"
54#include "analyzer/program-point.h"
55#include "analyzer/store.h"
757bf1df
DM
56#include "analyzer/region-model.h"
57#include "analyzer/constraint-manager.h"
58#include "diagnostic-event-id.h"
59#include "analyzer/sm.h"
60#include "diagnostic-event-id.h"
61#include "analyzer/sm.h"
62#include "analyzer/pending-diagnostic.h"
808f4dfe 63#include "analyzer/region-model-reachability.h"
757bf1df 64#include "analyzer/analyzer-selftests.h"
f573d351 65#include "analyzer/program-state.h"
bfca9505 66#include "analyzer/call-summary.h"
884d9141 67#include "stor-layout.h"
c7e276b8 68#include "attribs.h"
9a2c9579 69#include "tree-object-size.h"
1e2fe671
DM
70#include "gimple-ssa.h"
71#include "tree-phinodes.h"
72#include "tree-ssa-operands.h"
73#include "ssa-iterators.h"
5fbcbcaf 74#include "calls.h"
e6c3bb37 75#include "is-a.h"
c81b60b8 76#include "gcc-rich-location.h"
f5758fe5
DM
77#include "analyzer/checker-event.h"
78#include "analyzer/checker-path.h"
b03a10b0 79#include "analyzer/feasible-graph.h"
757bf1df
DM
80
81#if ENABLE_ANALYZER
82
75038aa6
DM
83namespace ana {
84
757bf1df
DM
85/* Dump T to PP in language-independent form, for debugging/logging/dumping
86 purposes. */
87
757bf1df 88void
808f4dfe 89dump_tree (pretty_printer *pp, tree t)
757bf1df 90{
808f4dfe 91 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
757bf1df
DM
92}
93
808f4dfe
DM
94/* Dump T to PP in language-independent form in quotes, for
95 debugging/logging/dumping purposes. */
757bf1df
DM
96
97void
808f4dfe 98dump_quoted_tree (pretty_printer *pp, tree t)
757bf1df 99{
808f4dfe
DM
100 pp_begin_quote (pp, pp_show_color (pp));
101 dump_tree (pp, t);
102 pp_end_quote (pp, pp_show_color (pp));
757bf1df
DM
103}
104
808f4dfe
DM
105/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
106 calls within other pp_printf calls.
757bf1df 107
808f4dfe
DM
108 default_tree_printer handles 'T' and some other codes by calling
109 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
110 dump_generic_node calls pp_printf in various places, leading to
111 garbled output.
757bf1df 112
808f4dfe
DM
113 Ideally pp_printf could be made to be reentrant, but in the meantime
114 this function provides a workaround. */
6969ac30
DM
115
116void
808f4dfe 117print_quoted_type (pretty_printer *pp, tree t)
6969ac30 118{
808f4dfe
DM
119 pp_begin_quote (pp, pp_show_color (pp));
120 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
121 pp_end_quote (pp, pp_show_color (pp));
6969ac30
DM
122}
123
d726a57b
DM
124/* class region_to_value_map. */
125
126/* Assignment operator for region_to_value_map. */
127
128region_to_value_map &
129region_to_value_map::operator= (const region_to_value_map &other)
130{
131 m_hash_map.empty ();
132 for (auto iter : other.m_hash_map)
133 {
134 const region *reg = iter.first;
135 const svalue *sval = iter.second;
136 m_hash_map.put (reg, sval);
137 }
138 return *this;
139}
140
141/* Equality operator for region_to_value_map. */
142
143bool
144region_to_value_map::operator== (const region_to_value_map &other) const
145{
146 if (m_hash_map.elements () != other.m_hash_map.elements ())
147 return false;
148
149 for (auto iter : *this)
150 {
151 const region *reg = iter.first;
152 const svalue *sval = iter.second;
153 const svalue * const *other_slot = other.get (reg);
154 if (other_slot == NULL)
155 return false;
156 if (sval != *other_slot)
157 return false;
158 }
159
160 return true;
161}
162
163/* Dump this object to PP. */
164
165void
166region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
167 bool multiline) const
168{
169 auto_vec<const region *> regs;
170 for (iterator iter = begin (); iter != end (); ++iter)
171 regs.safe_push ((*iter).first);
172 regs.qsort (region::cmp_ptr_ptr);
173 if (multiline)
174 pp_newline (pp);
175 else
176 pp_string (pp, " {");
177 unsigned i;
178 const region *reg;
179 FOR_EACH_VEC_ELT (regs, i, reg)
180 {
181 if (multiline)
182 pp_string (pp, " ");
183 else if (i > 0)
184 pp_string (pp, ", ");
185 reg->dump_to_pp (pp, simple);
186 pp_string (pp, ": ");
187 const svalue *sval = *get (reg);
188 sval->dump_to_pp (pp, true);
189 if (multiline)
190 pp_newline (pp);
191 }
192 if (!multiline)
193 pp_string (pp, "}");
194}
195
196/* Dump this object to stderr. */
197
198DEBUG_FUNCTION void
199region_to_value_map::dump (bool simple) const
200{
201 pretty_printer pp;
202 pp_format_decoder (&pp) = default_tree_printer;
203 pp_show_color (&pp) = pp_show_color (global_dc->printer);
204 pp.buffer->stream = stderr;
205 dump_to_pp (&pp, simple, true);
206 pp_newline (&pp);
207 pp_flush (&pp);
208}
209
210
211/* Attempt to merge THIS with OTHER, writing the result
212 to OUT.
213
214 For now, write (region, value) mappings that are in common between THIS
ce917b04
DM
215 and OTHER to OUT, effectively taking the intersection.
216
217 Reject merger of different values. */
d726a57b
DM
218
219bool
220region_to_value_map::can_merge_with_p (const region_to_value_map &other,
221 region_to_value_map *out) const
222{
223 for (auto iter : *this)
224 {
225 const region *iter_reg = iter.first;
226 const svalue *iter_sval = iter.second;
227 const svalue * const * other_slot = other.get (iter_reg);
228 if (other_slot)
ce917b04
DM
229 {
230 if (iter_sval == *other_slot)
231 out->put (iter_reg, iter_sval);
232 else
233 return false;
234 }
d726a57b
DM
235 }
236 return true;
237}
238
33255ad3
DM
239/* Purge any state involving SVAL. */
240
241void
242region_to_value_map::purge_state_involving (const svalue *sval)
243{
244 auto_vec<const region *> to_purge;
245 for (auto iter : *this)
246 {
247 const region *iter_reg = iter.first;
248 const svalue *iter_sval = iter.second;
249 if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
250 to_purge.safe_push (iter_reg);
251 }
252 for (auto iter : to_purge)
253 m_hash_map.remove (iter);
254}
255
757bf1df
DM
256/* class region_model. */
257
808f4dfe 258/* Ctor for region_model: construct an "empty" model. */
757bf1df 259
808f4dfe 260region_model::region_model (region_model_manager *mgr)
9a2c9579
DM
261: m_mgr (mgr), m_store (), m_current_frame (NULL),
262 m_dynamic_extents ()
757bf1df 263{
808f4dfe 264 m_constraints = new constraint_manager (mgr);
757bf1df
DM
265}
266
267/* region_model's copy ctor. */
268
269region_model::region_model (const region_model &other)
808f4dfe
DM
270: m_mgr (other.m_mgr), m_store (other.m_store),
271 m_constraints (new constraint_manager (*other.m_constraints)),
9a2c9579
DM
272 m_current_frame (other.m_current_frame),
273 m_dynamic_extents (other.m_dynamic_extents)
757bf1df 274{
757bf1df
DM
275}
276
277/* region_model's dtor. */
278
279region_model::~region_model ()
280{
281 delete m_constraints;
282}
283
284/* region_model's assignment operator. */
285
286region_model &
287region_model::operator= (const region_model &other)
288{
808f4dfe
DM
289 /* m_mgr is const. */
290 gcc_assert (m_mgr == other.m_mgr);
757bf1df 291
808f4dfe 292 m_store = other.m_store;
757bf1df
DM
293
294 delete m_constraints;
808f4dfe 295 m_constraints = new constraint_manager (*other.m_constraints);
757bf1df 296
808f4dfe 297 m_current_frame = other.m_current_frame;
757bf1df 298
9a2c9579
DM
299 m_dynamic_extents = other.m_dynamic_extents;
300
757bf1df
DM
301 return *this;
302}
303
304/* Equality operator for region_model.
305
808f4dfe
DM
306 Amongst other things this directly compares the stores and the constraint
307 managers, so for this to be meaningful both this and OTHER should
757bf1df
DM
308 have been canonicalized. */
309
310bool
311region_model::operator== (const region_model &other) const
312{
808f4dfe
DM
313 /* We can only compare instances that use the same manager. */
314 gcc_assert (m_mgr == other.m_mgr);
757bf1df 315
808f4dfe 316 if (m_store != other.m_store)
757bf1df
DM
317 return false;
318
319 if (*m_constraints != *other.m_constraints)
320 return false;
321
808f4dfe
DM
322 if (m_current_frame != other.m_current_frame)
323 return false;
757bf1df 324
9a2c9579
DM
325 if (m_dynamic_extents != other.m_dynamic_extents)
326 return false;
327
757bf1df
DM
328 gcc_checking_assert (hash () == other.hash ());
329
330 return true;
331}
332
333/* Generate a hash value for this region_model. */
334
335hashval_t
808f4dfe
DM
336region_model::hash () const
337{
338 hashval_t result = m_store.hash ();
339 result ^= m_constraints->hash ();
340 return result;
757bf1df
DM
341}
342
808f4dfe
DM
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.
   If MULTILINE, each section gets its own line(s); otherwise a compact
   single-line form with "{...}" delimiters is produced.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  /* Walk the frames from innermost (m_current_frame) outwards.  */
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}
757bf1df 397
808f4dfe 398/* Dump a representation of this model to FILE. */
757bf1df 399
808f4dfe
DM
400void
401region_model::dump (FILE *fp, bool simple, bool multiline) const
402{
403 pretty_printer pp;
404 pp_format_decoder (&pp) = default_tree_printer;
405 pp_show_color (&pp) = pp_show_color (global_dc->printer);
406 pp.buffer->stream = fp;
407 dump_to_pp (&pp, simple, multiline);
408 pp_newline (&pp);
409 pp_flush (&pp);
757bf1df
DM
410}
411
808f4dfe 412/* Dump a multiline representation of this model to stderr. */
757bf1df 413
808f4dfe
DM
414DEBUG_FUNCTION void
415region_model::dump (bool simple) const
416{
417 dump (stderr, simple, true);
418}
757bf1df 419
808f4dfe 420/* Dump a multiline representation of this model to stderr. */
757bf1df 421
808f4dfe
DM
422DEBUG_FUNCTION void
423region_model::debug () const
757bf1df 424{
808f4dfe 425 dump (true);
757bf1df
DM
426}
427
e61ffa20
DM
428/* Assert that this object is valid. */
429
430void
431region_model::validate () const
432{
433 m_store.validate ();
434}
435
808f4dfe
DM
436/* Canonicalize the store and constraints, to maximize the chance of
437 equality between region_model instances. */
757bf1df
DM
438
439void
808f4dfe 440region_model::canonicalize ()
757bf1df 441{
808f4dfe
DM
442 m_store.canonicalize (m_mgr->get_store_manager ());
443 m_constraints->canonicalize ();
757bf1df
DM
444}
445
446/* Return true if this region_model is in canonical form. */
447
448bool
449region_model::canonicalized_p () const
450{
451 region_model copy (*this);
808f4dfe 452 copy.canonicalize ();
757bf1df
DM
453 return *this == copy;
454}
455
808f4dfe
DM
456/* See the comment for store::loop_replay_fixup. */
457
458void
459region_model::loop_replay_fixup (const region_model *dst_state)
460{
461 m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
462}
463
757bf1df
DM
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR: tree for the poisoned value being used.
     PKIND: how the value is poisoned (uninit/freed/popped stack).
     SRC_REGION: hint about where the value came from; can be NULL.
     CHECK_EXPR: expr to re-query during feasibility analysis; can be
     NULL (see check_valid_fpath_p).  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region,
			     tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  /* NOTE: m_check_expr is not part of the comparison; it is only a
     hint used by check_valid_fpath_p, not part of the diagnostic's
     identity.  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  /* Map the poison kind to the-Wanalyzer-* option controlling the
     warning.  */
  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode,
			    const gimple *emission_stmt)
    const final override
  {
    /* No expr to re-check: accept the diagnostic unconditionally.  */
    if (!m_check_expr)
      return true;

    /* We've reached the enode, but not necessarily the right function_point.
       Try to get the state at the correct stmt.  */
    region_model emission_model (fnode.get_model ().get_manager());
    if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
      /* Couldn't get state; accept this diagnostic.  */
      return true;

    const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULL);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue * fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

private:
  tree m_expr;			/* The poisoned value being used.  */
  enum poison_kind m_pkind;	/* How the value is poisoned.  */
  const region *m_src_region;	/* Origin hint; can be NULL.  */
  tree m_check_expr;		/* Feasibility re-check expr; can be NULL.  */
};
611
5e00ad3f
DM
612/* A subclass of pending_diagnostic for complaining about shifts
613 by negative counts. */
614
615class shift_count_negative_diagnostic
616: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
617{
618public:
619 shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
620 : m_assign (assign), m_count_cst (count_cst)
621 {}
622
ff171cb1 623 const char *get_kind () const final override
5e00ad3f
DM
624 {
625 return "shift_count_negative_diagnostic";
626 }
627
628 bool operator== (const shift_count_negative_diagnostic &other) const
629 {
630 return (m_assign == other.m_assign
631 && same_tree_p (m_count_cst, other.m_count_cst));
632 }
633
ff171cb1 634 int get_controlling_option () const final override
7fd6e36e
DM
635 {
636 return OPT_Wanalyzer_shift_count_negative;
637 }
638
ff171cb1 639 bool emit (rich_location *rich_loc) final override
5e00ad3f 640 {
7fd6e36e 641 return warning_at (rich_loc, get_controlling_option (),
5e00ad3f
DM
642 "shift by negative count (%qE)", m_count_cst);
643 }
644
ff171cb1 645 label_text describe_final_event (const evdesc::final_event &ev) final override
5e00ad3f
DM
646 {
647 return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
648 }
649
650private:
651 const gassign *m_assign;
652 tree m_count_cst;
653};
654
655/* A subclass of pending_diagnostic for complaining about shifts
656 by counts >= the width of the operand type. */
657
658class shift_count_overflow_diagnostic
659: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
660{
661public:
662 shift_count_overflow_diagnostic (const gassign *assign,
663 int operand_precision,
664 tree count_cst)
665 : m_assign (assign), m_operand_precision (operand_precision),
666 m_count_cst (count_cst)
667 {}
668
ff171cb1 669 const char *get_kind () const final override
5e00ad3f
DM
670 {
671 return "shift_count_overflow_diagnostic";
672 }
673
674 bool operator== (const shift_count_overflow_diagnostic &other) const
675 {
676 return (m_assign == other.m_assign
677 && m_operand_precision == other.m_operand_precision
678 && same_tree_p (m_count_cst, other.m_count_cst));
679 }
680
ff171cb1 681 int get_controlling_option () const final override
7fd6e36e
DM
682 {
683 return OPT_Wanalyzer_shift_count_overflow;
684 }
685
ff171cb1 686 bool emit (rich_location *rich_loc) final override
5e00ad3f 687 {
7fd6e36e 688 return warning_at (rich_loc, get_controlling_option (),
5e00ad3f
DM
689 "shift by count (%qE) >= precision of type (%qi)",
690 m_count_cst, m_operand_precision);
691 }
692
ff171cb1 693 label_text describe_final_event (const evdesc::final_event &ev) final override
5e00ad3f
DM
694 {
695 return ev.formatted_print ("shift by count %qE here", m_count_cst);
696 }
697
698private:
699 const gassign *m_assign;
700 int m_operand_precision;
701 tree m_count_cst;
702};
703
808f4dfe
DM
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.
   As a side effect, may warn via CTXT about out-of-range shift
   counts (INT34-C).  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Not a tree code we model; caller must handle it some other way.  */
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues; if the condition's
	       truth value is already known, fold to a constant.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of bits
	       or by greater than or equal to the number of bits that exist in
	       the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST)
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
909
1e2fe671
DM
/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR_IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
  L1: if (B) goto L2; else goto L4;
  L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  /* An SSA name with no underlying var decl is a compiler temporary.  */
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR ||op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
1003
b33dd787
DM
1004/* Workaround for discarding certain false positives from
1005 -Wanalyzer-use-of-uninitialized-value
1006 seen with -ftrivial-auto-var-init=.
1007
1008 -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.
1009
1010 If the address of the var is taken, gimplification will give us
1011 something like:
1012
1013 _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
1014 len = _1;
1015
1016 The result of DEFERRED_INIT will be an uninit value; we don't
1017 want to emit a false positive for "len = _1;"
1018
1019 Return true if ASSIGN_STMT is such a stmt. */
1020
1021static bool
1022due_to_ifn_deferred_init_p (const gassign *assign_stmt)
1023
1024{
1025 /* We must have an assignment to a decl from an SSA name that's the
1026 result of a IFN_DEFERRED_INIT call. */
1027 if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
1028 return false;
1029 tree lhs = gimple_assign_lhs (assign_stmt);
1030 if (TREE_CODE (lhs) != VAR_DECL)
1031 return false;
1032 tree rhs = gimple_assign_rhs1 (assign_stmt);
1033 if (TREE_CODE (rhs) != SSA_NAME)
1034 return false;
1035 const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1036 const gcall *call = dyn_cast <const gcall *> (def_stmt);
1037 if (!call)
1038 return false;
1039 if (gimple_call_internal_p (call)
1040 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
1041 return true;
1042 return false;
1043}
1044
/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				const region *src_region,
				region_model_context *ctxt) const
{
  /* Without a context there is nowhere to report; bail out early.  */
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      /* Suppress two known classes of false positive for uninit
	 values when the current stmt is an assignment.  */
      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);

      /* Can we reliably get the poisoned value from "expr"?
	 This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
	 Unfortunately, we might not have a reliable value for EXPR.
	 Hence we only query its value now, and only use it if we get the
	 poisoned value back again.  */
      tree check_expr = expr;
      const svalue *foo_sval = get_rvalue (expr, NULL);
      if (foo_sval == sval)
	check_expr = expr;
      else
	check_expr = NULL;
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region,
							      check_expr)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}
1121
00e7d024
DM
1122/* Attempt to get a region for describing EXPR, the source of region of
1123 a poisoned_svalue for use in a poisoned_value_diagnostic.
1124 Return NULL if there is no good region to use. */
1125
1126const region *
1127region_model::get_region_for_poisoned_expr (tree expr) const
1128{
1129 if (TREE_CODE (expr) == SSA_NAME)
1130 {
1131 tree decl = SSA_NAME_VAR (expr);
1132 if (decl && DECL_P (decl))
1133 expr = decl;
1134 else
1135 return NULL;
1136 }
1137 return get_lvalue (expr, NULL);
1138}
1139
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      /* Complain about uses of poisoned (e.g. uninit) values.  */
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Flip to "if (1)" to find unhandled assignment ops during
	   development; otherwise fall back to an unknown value.  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    /* Bind each vector element individually; INDEX may be
	       absent, in which case the element position IX is used.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}
1222
33255ad3 1223/* Handle the pre-sm-state part of STMT, modifying this object in-place.
33255ad3
DM
1224 Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
1225 side effects. */
1226
1227void
1228region_model::on_stmt_pre (const gimple *stmt,
33255ad3
DM
1229 bool *out_unknown_side_effects,
1230 region_model_context *ctxt)
1231{
1232 switch (gimple_code (stmt))
1233 {
1234 default:
1235 /* No-op for now. */
1236 break;
1237
1238 case GIMPLE_ASSIGN:
1239 {
1240 const gassign *assign = as_a <const gassign *> (stmt);
1241 on_assignment (assign, ctxt);
1242 }
1243 break;
1244
1245 case GIMPLE_ASM:
ded2c2c0
DM
1246 {
1247 const gasm *asm_stmt = as_a <const gasm *> (stmt);
1248 on_asm_stmt (asm_stmt, ctxt);
1249 }
33255ad3
DM
1250 break;
1251
1252 case GIMPLE_CALL:
1253 {
1254 /* Track whether we have a gcall to a function that's not recognized by
1255 anything, for which we don't have a function body, or for which we
1256 don't know the fndecl. */
1257 const gcall *call = as_a <const gcall *> (stmt);
6bd31b33 1258 *out_unknown_side_effects = on_call_pre (call, ctxt);
33255ad3
DM
1259 }
1260 break;
1261
1262 case GIMPLE_RETURN:
1263 {
1264 const greturn *return_ = as_a <const greturn *> (stmt);
1265 on_return (return_, ctxt);
1266 }
1267 break;
1268 }
1269}
1270
9ff3e236
DM
1271/* Ensure that all arguments at the call described by CD are checked
1272 for poisoned values, by calling get_rvalue on each argument. */
1273
1274void
1275region_model::check_call_args (const call_details &cd) const
1276{
1277 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1278 cd.get_arg_svalue (arg_idx);
1279}
1280
aee1adf2
DM
1281/* Return true if CD is known to be a call to a function with
1282 __attribute__((const)). */
1283
1284static bool
1285const_fn_p (const call_details &cd)
1286{
1287 tree fndecl = cd.get_fndecl_for_call ();
1288 if (!fndecl)
1289 return false;
1290 gcc_assert (DECL_P (fndecl));
1291 return TREE_READONLY (fndecl);
1292}
1293
1294/* If this CD is known to be a call to a function with
1295 __attribute__((const)), attempt to get a const_fn_result_svalue
1296 based on the arguments, or return NULL otherwise. */
1297
1298static const svalue *
1299maybe_get_const_fn_result (const call_details &cd)
1300{
1301 if (!const_fn_p (cd))
1302 return NULL;
1303
1304 unsigned num_args = cd.num_args ();
1305 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1306 /* Too many arguments. */
1307 return NULL;
1308
1309 auto_vec<const svalue *> inputs (num_args);
1310 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1311 {
1312 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1313 if (!arg_sval->can_have_associated_state_p ())
1314 return NULL;
1315 inputs.quick_push (arg_sval);
1316 }
1317
1318 region_model_manager *mgr = cd.get_manager ();
1319 const svalue *sval
1320 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1321 cd.get_fndecl_for_call (),
1322 inputs);
1323 return sval;
1324}
1325
792f039f
DM
1326/* Update this model for an outcome of a call that returns a specific
1327 integer constant.
07e30160
DM
1328 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1329 the state-merger code from merging success and failure outcomes. */
1330
1331void
792f039f
DM
1332region_model::update_for_int_cst_return (const call_details &cd,
1333 int retval,
1334 bool unmergeable)
07e30160
DM
1335{
1336 if (!cd.get_lhs_type ())
1337 return;
4e4e45a4
DM
1338 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1339 return;
07e30160 1340 const svalue *result
792f039f 1341 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
07e30160
DM
1342 if (unmergeable)
1343 result = m_mgr->get_or_create_unmergeable (result);
1344 set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1345}
1346
/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  /* Delegate to the general integer-constant-return handler with 0.  */
  update_for_int_cst_return (cd, 0, unmergeable);
}
1357
07e30160
DM
1358/* Update this model for an outcome of a call that returns non-zero. */
1359
1360void
1361region_model::update_for_nonzero_return (const call_details &cd)
1362{
1363 if (!cd.get_lhs_type ())
1364 return;
4e4e45a4
DM
1365 if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
1366 return;
07e30160
DM
1367 const svalue *zero
1368 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1369 const svalue *result
1370 = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1371 add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
1372}
1373
1374/* Subroutine of region_model::maybe_get_copy_bounds.
1375 The Linux kernel commonly uses
1376 min_t([unsigned] long, VAR, sizeof(T));
1377 to set an upper bound on the size of a copy_to_user.
1378 Attempt to simplify such sizes by trying to get the upper bound as a
1379 constant.
1380 Return the simplified svalue if possible, or NULL otherwise. */
1381
1382static const svalue *
1383maybe_simplify_upper_bound (const svalue *num_bytes_sval,
1384 region_model_manager *mgr)
1385{
1386 tree type = num_bytes_sval->get_type ();
1387 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
1388 num_bytes_sval = raw;
1389 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
1390 if (binop_sval->get_op () == MIN_EXPR)
1391 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
1392 {
1393 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
1394 /* TODO: we might want to also capture the constraint
1395 when recording the diagnostic, or note that we're using
1396 the upper bound. */
1397 }
1398 return NULL;
1399}
1400
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Try simplifying e.g. "min_t (long, VAR, sizeof (T))" down to
     its constant upper bound.  */
  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}
1437
6bd31b33
DM
1438/* Get any known_function for FNDECL for call CD.
1439
1440 The call must match all assumptions made by the known_function (such as
1441 e.g. "argument 1's type must be a pointer type").
1442
1443 Return NULL if no known_function is found, or it does not match the
1444 assumption(s). */
1445
1446const known_function *
1447region_model::get_known_function (tree fndecl, const call_details &cd) const
1448{
1449 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
1450 return known_fn_mgr->get_match (fndecl, cd);
1451}
1452
1453/* Get any known_function for IFN, or NULL. */
07e30160
DM
1454
1455const known_function *
6bd31b33 1456region_model::get_known_function (enum internal_fn ifn) const
07e30160
DM
1457{
1458 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
6bd31b33 1459 return known_fn_mgr->get_internal_fn (ifn);
07e30160
DM
1460}
1461
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  bool unknown_side_effects = false;

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false;

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  /* Some of the cases below update the lhs of the call based on the
     return value, but not all.  Provide a default value, which may
     get overwritten below.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      /* __attribute__((const)) functions get a result keyed purely
	 off their inputs.  */
      const svalue *sval = maybe_get_const_fn_result (cd);
      if (!sval)
	{
	  if (callee_fndecl
	      && lookup_attribute ("malloc", DECL_ATTRIBUTES (callee_fndecl)))
	    {
	      /* __attribute__((malloc)): model the result as a pointer
		 to a fresh heap allocation of unknown content.  */
	      const region *new_reg
		= get_or_create_region_for_heap_alloc (NULL, ctxt);
	      mark_region_as_unknown (new_reg, NULL);
	      sval = m_mgr->get_ptr_svalue (cd.get_lhs_type (), new_reg);
	    }
	  else
	    /* For the common case of functions without __attribute__((const)),
	       use a conjured value, and purge any prior state involving that
	       value (in case this is in a loop).  */
	    sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
							 lhs_region,
							 conjured_purge (this,
									 ctxt));
	}
      set_value (lhs_region, sval, ctxt);
    }

  /* Internal fns with a known_function handler are fully modeled.  */
  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false;
      }

  if (callee_fndecl)
    {
      int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);

      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_pre (cd);
	  return false;
	}
      else if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
	       && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
	{
	  /* Unhandled non-const/pure builtins may touch state we
	     don't model.  */
	  if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	    unknown_side_effects = true;
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
	       && !fndecl_built_in_p (callee_fndecl))
	/* No body to analyze and no special handling: assume the
	   worst.  */
	unknown_side_effects = true;
    }
  else
    /* We couldn't even determine the fndecl.  */
    unknown_side_effects = true;

  return unknown_side_effects;
}
1561
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  /* Treat it as a deallocator of its pointer argument.  */
	  impl_deallocation_call (cd);
	  return;
	}
    }

  /* Fall back to wiping reachable state for calls we know nothing
     about.  */
  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
1597
/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  /* Values that can't have associated state need no purging.  */
  if (!sval->can_have_associated_state_p ())
    return;
  /* Purge from each component of the model, then let the context
     purge any sm-state in the enclosing program_state.  */
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}
1631
/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  /* Emit a note pointing at the callee's declaration, describing the
     attribute that led to the diagnostic.  */
  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  /* Deduplication: two notes are equal if they name the same decl,
     argument, and access string.  */
  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;     /* The function with the attribute.  */
  unsigned m_ptr_argno;     /* 0-based index of the pointer argument.  */
  const char *m_access_str; /* Externalized form of the attribute.  */
};
1666
/* Check CALL a call to external function CALLEE_FNDECL based on
   any __attribute__ ((access, ....) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */

void
region_model::
check_external_function_for_access_attr (const gcall *call,
					 tree callee_fndecl,
					 region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of decorated_region_model_context that
	     adds a note about the attr access to any saved diagnostics.  */
	  class annotating_ctxt : public note_adding_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : note_adding_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    std::unique_ptr<pending_note> make_note () final override
	    {
	      return make_unique<reason_attr_access>
		(m_callee_fndecl, m_access);
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them.  */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, &my_ctxt);
	  /* We don't use the size arg for now.  */
	}
    }
}
1748
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  /* Check writability of any regions implied by
     __attribute__((access, ...)) before wiping state.  */
  if (fndecl && ctxt)
    check_external_function_for_access_attr (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
ef7827b0 1839
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Maybe-bound svalues also count as roots.  */
  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
1889
1890/* Update this model for the RETURN_STMT, using CTXT to report any
1891 diagnostics. */
1892
1893void
1894region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1895{
1896 tree callee = get_current_function ()->decl;
1897 tree lhs = DECL_RESULT (callee);
1898 tree rhs = gimple_return_retval (return_stmt);
1899
1900 if (lhs && rhs)
13ad6d9f
DM
1901 {
1902 const svalue *sval = get_rvalue (rhs, ctxt);
1903 const region *ret_reg = get_lvalue (lhs, ctxt);
1904 set_value (ret_reg, sval, ctxt);
1905 }
757bf1df
DM
1906}
1907
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/sigsetjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  /* Evaluate the jmp_buf argument and find the region it points to.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
1941
1942/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1943 to a "setjmp" at SETJMP_CALL where the final stack depth should be
808f4dfe
DM
1944 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1945 done, and should be done by the caller. */
757bf1df
DM
1946
1947void
1948region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
808f4dfe 1949 int setjmp_stack_depth, region_model_context *ctxt)
757bf1df
DM
1950{
1951 /* Evaluate the val, using the frame of the "longjmp". */
1952 tree fake_retval = gimple_call_arg (longjmp_call, 1);
808f4dfe 1953 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
757bf1df
DM
1954
1955 /* Pop any frames until we reach the stack depth of the function where
1956 setjmp was called. */
1957 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1958 while (get_stack_depth () > setjmp_stack_depth)
808f4dfe 1959 pop_frame (NULL, NULL, ctxt);
757bf1df
DM
1960
1961 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1962
1963 /* Assign to LHS of "setjmp" in new_state. */
1964 if (tree lhs = gimple_call_lhs (setjmp_call))
1965 {
1966 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1aff29d4
DM
1967 const svalue *zero_sval
1968 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
808f4dfe 1969 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
757bf1df
DM
1970 /* If we have 0, use 1. */
1971 if (eq_zero.is_true ())
1972 {
808f4dfe 1973 const svalue *one_sval
1aff29d4 1974 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
808f4dfe 1975 fake_retval_sval = one_sval;
757bf1df
DM
1976 }
1977 else
1978 {
1979 /* Otherwise note that the value is nonzero. */
808f4dfe 1980 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
757bf1df
DM
1981 }
1982
808f4dfe
DM
1983 /* Decorate the return value from setjmp as being unmergeable,
1984 so that we don't attempt to merge states with it as zero
1985 with states in which it's nonzero, leading to a clean distinction
1986 in the exploded_graph betweeen the first return and the second
1987 return. */
1988 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
757bf1df 1989
808f4dfe
DM
1990 const region *lhs_reg = get_lvalue (lhs, ctxt);
1991 set_value (lhs_reg, fake_retval_sval, ctxt);
1992 }
757bf1df
DM
1993}
1994
1995/* Update this region_model for a phi stmt of the form
1996 LHS = PHI <...RHS...>.
e0a7a675
DM
1997 where RHS is for the appropriate edge.
1998 Get state from OLD_STATE so that all of the phi stmts for a basic block
1999 are effectively handled simultaneously. */
757bf1df
DM
2000
2001void
8525d1f5 2002region_model::handle_phi (const gphi *phi,
808f4dfe 2003 tree lhs, tree rhs,
e0a7a675 2004 const region_model &old_state,
757bf1df
DM
2005 region_model_context *ctxt)
2006{
2007 /* For now, don't bother tracking the .MEM SSA names. */
2008 if (tree var = SSA_NAME_VAR (lhs))
2009 if (TREE_CODE (var) == VAR_DECL)
2010 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2011 return;
2012
e0a7a675
DM
2013 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2014 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
757bf1df 2015
e0a7a675 2016 set_value (dst_reg, src_sval, ctxt);
8525d1f5
DM
2017
2018 if (ctxt)
2019 ctxt->on_phi (phi, rhs);
757bf1df
DM
2020}
2021
2022/* Implementation of region_model::get_lvalue; the latter adds type-checking.
2023
2024 Get the id of the region for PV within this region_model,
2025 emitting any diagnostics to CTXT. */
2026
808f4dfe 2027const region *
53cb324c 2028region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2029{
2030 tree expr = pv.m_tree;
2031
2032 gcc_assert (expr);
2033
2034 switch (TREE_CODE (expr))
2035 {
2036 default:
808f4dfe
DM
2037 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2038 dump_location_t ());
757bf1df
DM
2039
2040 case ARRAY_REF:
2041 {
2042 tree array = TREE_OPERAND (expr, 0);
2043 tree index = TREE_OPERAND (expr, 1);
757bf1df 2044
808f4dfe
DM
2045 const region *array_reg = get_lvalue (array, ctxt);
2046 const svalue *index_sval = get_rvalue (index, ctxt);
2047 return m_mgr->get_element_region (array_reg,
2048 TREE_TYPE (TREE_TYPE (array)),
2049 index_sval);
757bf1df
DM
2050 }
2051 break;
2052
93e759fc
DM
2053 case BIT_FIELD_REF:
2054 {
2055 tree inner_expr = TREE_OPERAND (expr, 0);
2056 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2057 tree num_bits = TREE_OPERAND (expr, 1);
2058 tree first_bit_offset = TREE_OPERAND (expr, 2);
2059 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2060 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2061 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2062 TREE_INT_CST_LOW (num_bits));
2063 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2064 }
2065 break;
2066
757bf1df
DM
2067 case MEM_REF:
2068 {
2069 tree ptr = TREE_OPERAND (expr, 0);
2070 tree offset = TREE_OPERAND (expr, 1);
808f4dfe
DM
2071 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2072 const svalue *offset_sval = get_rvalue (offset, ctxt);
2073 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2074 return m_mgr->get_offset_region (star_ptr,
2075 TREE_TYPE (expr),
2076 offset_sval);
757bf1df
DM
2077 }
2078 break;
2079
808f4dfe
DM
2080 case FUNCTION_DECL:
2081 return m_mgr->get_region_for_fndecl (expr);
2082
2083 case LABEL_DECL:
2084 return m_mgr->get_region_for_label (expr);
2085
757bf1df
DM
2086 case VAR_DECL:
2087 /* Handle globals. */
2088 if (is_global_var (expr))
808f4dfe 2089 return m_mgr->get_region_for_global (expr);
757bf1df
DM
2090
2091 /* Fall through. */
2092
2093 case SSA_NAME:
2094 case PARM_DECL:
2095 case RESULT_DECL:
2096 {
2097 gcc_assert (TREE_CODE (expr) == SSA_NAME
2098 || TREE_CODE (expr) == PARM_DECL
2099 || TREE_CODE (expr) == VAR_DECL
2100 || TREE_CODE (expr) == RESULT_DECL);
2101
808f4dfe
DM
2102 int stack_index = pv.m_stack_depth;
2103 const frame_region *frame = get_frame_at_index (stack_index);
757bf1df 2104 gcc_assert (frame);
4cebae09 2105 return frame->get_region_for_local (m_mgr, expr, ctxt);
757bf1df
DM
2106 }
2107
2108 case COMPONENT_REF:
2109 {
2110 /* obj.field */
2111 tree obj = TREE_OPERAND (expr, 0);
2112 tree field = TREE_OPERAND (expr, 1);
808f4dfe
DM
2113 const region *obj_reg = get_lvalue (obj, ctxt);
2114 return m_mgr->get_field_region (obj_reg, field);
41a9e940
DM
2115 }
2116 break;
2117
757bf1df 2118 case STRING_CST:
808f4dfe 2119 return m_mgr->get_region_for_string (expr);
757bf1df
DM
2120 }
2121}
2122
2123/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2124
09bea584
DM
2125static void
2126assert_compat_types (tree src_type, tree dst_type)
2127{
2128 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
2129 {
2130#if CHECKING_P
2131 if (!(useless_type_conversion_p (src_type, dst_type)))
2132 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2133#endif
2134 }
09bea584 2135}
757bf1df 2136
ea4e3218
DM
2137/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2138
e66b9f67 2139bool
ea4e3218
DM
2140compat_types_p (tree src_type, tree dst_type)
2141{
2142 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2143 if (!(useless_type_conversion_p (src_type, dst_type)))
2144 return false;
2145 return true;
2146}
2147
808f4dfe 2148/* Get the region for PV within this region_model,
757bf1df
DM
2149 emitting any diagnostics to CTXT. */
2150
808f4dfe 2151const region *
53cb324c 2152region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2153{
2154 if (pv.m_tree == NULL_TREE)
808f4dfe 2155 return NULL;
757bf1df 2156
808f4dfe
DM
2157 const region *result_reg = get_lvalue_1 (pv, ctxt);
2158 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2159 return result_reg;
757bf1df
DM
2160}
2161
808f4dfe 2162/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
2163 recent stack frame if it's a local). */
2164
808f4dfe 2165const region *
53cb324c 2166region_model::get_lvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
2167{
2168 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2169}
2170
2171/* Implementation of region_model::get_rvalue; the latter adds type-checking.
2172
2173 Get the value of PV within this region_model,
2174 emitting any diagnostics to CTXT. */
2175
808f4dfe 2176const svalue *
53cb324c 2177region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2178{
2179 gcc_assert (pv.m_tree);
2180
2181 switch (TREE_CODE (pv.m_tree))
2182 {
2183 default:
2242b975 2184 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
757bf1df
DM
2185
2186 case ADDR_EXPR:
2187 {
2188 /* "&EXPR". */
2189 tree expr = pv.m_tree;
2190 tree op0 = TREE_OPERAND (expr, 0);
808f4dfe
DM
2191 const region *expr_reg = get_lvalue (op0, ctxt);
2192 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
757bf1df
DM
2193 }
2194 break;
2195
808f4dfe 2196 case BIT_FIELD_REF:
d3b1ef7a
DM
2197 {
2198 tree expr = pv.m_tree;
2199 tree op0 = TREE_OPERAND (expr, 0);
2200 const region *reg = get_lvalue (op0, ctxt);
2201 tree num_bits = TREE_OPERAND (expr, 1);
2202 tree first_bit_offset = TREE_OPERAND (expr, 2);
2203 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2204 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2205 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2206 TREE_INT_CST_LOW (num_bits));
9faf8348 2207 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
d3b1ef7a 2208 }
808f4dfe
DM
2209
2210 case SSA_NAME:
2211 case VAR_DECL:
2212 case PARM_DECL:
2213 case RESULT_DECL:
757bf1df
DM
2214 case ARRAY_REF:
2215 {
da7c2773 2216 const region *reg = get_lvalue (pv, ctxt);
9faf8348 2217 return get_store_value (reg, ctxt);
757bf1df
DM
2218 }
2219
808f4dfe
DM
2220 case REALPART_EXPR:
2221 case IMAGPART_EXPR:
2222 case VIEW_CONVERT_EXPR:
2223 {
2224 tree expr = pv.m_tree;
2225 tree arg = TREE_OPERAND (expr, 0);
2226 const svalue *arg_sval = get_rvalue (arg, ctxt);
2227 const svalue *sval_unaryop
2228 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2229 arg_sval);
2230 return sval_unaryop;
2231 };
2232
757bf1df
DM
2233 case INTEGER_CST:
2234 case REAL_CST:
808f4dfe
DM
2235 case COMPLEX_CST:
2236 case VECTOR_CST:
757bf1df 2237 case STRING_CST:
808f4dfe
DM
2238 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2239
2240 case POINTER_PLUS_EXPR:
2241 {
2242 tree expr = pv.m_tree;
2243 tree ptr = TREE_OPERAND (expr, 0);
2244 tree offset = TREE_OPERAND (expr, 1);
2245 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2246 const svalue *offset_sval = get_rvalue (offset, ctxt);
2247 const svalue *sval_binop
2248 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2249 ptr_sval, offset_sval);
2250 return sval_binop;
2251 }
2252
2253 /* Binary ops. */
2254 case PLUS_EXPR:
2255 case MULT_EXPR:
2256 {
2257 tree expr = pv.m_tree;
2258 tree arg0 = TREE_OPERAND (expr, 0);
2259 tree arg1 = TREE_OPERAND (expr, 1);
2260 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2261 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2262 const svalue *sval_binop
2263 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2264 arg0_sval, arg1_sval);
2265 return sval_binop;
2266 }
757bf1df
DM
2267
2268 case COMPONENT_REF:
2269 case MEM_REF:
757bf1df 2270 {
808f4dfe 2271 const region *ref_reg = get_lvalue (pv, ctxt);
9faf8348 2272 return get_store_value (ref_reg, ctxt);
757bf1df 2273 }
1b342485
AS
2274 case OBJ_TYPE_REF:
2275 {
2276 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2277 return get_rvalue (expr, ctxt);
2278 }
757bf1df
DM
2279 }
2280}
2281
2282/* Get the value of PV within this region_model,
2283 emitting any diagnostics to CTXT. */
2284
808f4dfe 2285const svalue *
53cb324c 2286region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2287{
2288 if (pv.m_tree == NULL_TREE)
808f4dfe 2289 return NULL;
757bf1df 2290
808f4dfe 2291 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 2292
808f4dfe
DM
2293 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2294
2fdc8546 2295 result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);
33255ad3 2296
808f4dfe 2297 return result_sval;
757bf1df
DM
2298}
2299
2300/* Get the value of EXPR within this region_model (assuming the most
2301 recent stack frame if it's a local). */
2302
808f4dfe 2303const svalue *
53cb324c 2304region_model::get_rvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
2305{
2306 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2307}
2308
623bc027
DM
2309/* Return true if this model is on a path with "main" as the entrypoint
2310 (as opposed to one in which we're merely analyzing a subset of the
2311 path through the code). */
2312
2313bool
2314region_model::called_from_main_p () const
2315{
2316 if (!m_current_frame)
2317 return false;
2318 /* Determine if the oldest stack frame in this model is for "main". */
2319 const frame_region *frame0 = get_frame_at_index (0);
2320 gcc_assert (frame0);
2321 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2322}
2323
2324/* Subroutine of region_model::get_store_value for when REG is (or is within)
2325 a global variable that hasn't been touched since the start of this path
2326 (or was implicitly touched due to a call to an unknown function). */
2327
2328const svalue *
2329region_model::get_initial_value_for_global (const region *reg) const
2330{
2331 /* Get the decl that REG is for (or is within). */
2332 const decl_region *base_reg
2333 = reg->get_base_region ()->dyn_cast_decl_region ();
2334 gcc_assert (base_reg);
2335 tree decl = base_reg->get_decl ();
2336
2337 /* Special-case: to avoid having to explicitly update all previously
2338 untracked globals when calling an unknown fn, they implicitly have
2339 an unknown value if an unknown call has occurred, unless this is
2340 static to-this-TU and hasn't escaped. Globals that have escaped
2341 are explicitly tracked, so we shouldn't hit this case for them. */
af66094d
DM
2342 if (m_store.called_unknown_fn_p ()
2343 && TREE_PUBLIC (decl)
2344 && !TREE_READONLY (decl))
623bc027
DM
2345 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2346
2347 /* If we are on a path from the entrypoint from "main" and we have a
2348 global decl defined in this TU that hasn't been touched yet, then
2349 the initial value of REG can be taken from the initialization value
2350 of the decl. */
16ad9ae8 2351 if (called_from_main_p () || TREE_READONLY (decl))
623bc027 2352 {
61a43de5
DM
2353 /* Attempt to get the initializer value for base_reg. */
2354 if (const svalue *base_reg_init
2355 = base_reg->get_svalue_for_initializer (m_mgr))
623bc027 2356 {
61a43de5
DM
2357 if (reg == base_reg)
2358 return base_reg_init;
2359 else
623bc027 2360 {
61a43de5
DM
2361 /* Get the value for REG within base_reg_init. */
2362 binding_cluster c (base_reg);
e61ffa20 2363 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
61a43de5
DM
2364 const svalue *sval
2365 = c.get_any_binding (m_mgr->get_store_manager (), reg);
2366 if (sval)
2367 {
2368 if (reg->get_type ())
2369 sval = m_mgr->get_or_create_cast (reg->get_type (),
2370 sval);
2371 return sval;
2372 }
623bc027
DM
2373 }
2374 }
2375 }
2376
2377 /* Otherwise, return INIT_VAL(REG). */
2378 return m_mgr->get_or_create_initial_value (reg);
2379}
2380
808f4dfe 2381/* Get a value for REG, looking it up in the store, or otherwise falling
9faf8348
DM
2382 back to "initial" or "unknown" values.
2383 Use CTXT to report any warnings associated with reading from REG. */
757bf1df 2384
808f4dfe 2385const svalue *
9faf8348
DM
2386region_model::get_store_value (const region *reg,
2387 region_model_context *ctxt) const
757bf1df 2388{
dfe2ef7f
DM
2389 /* Getting the value of an empty region gives an unknown_svalue. */
2390 if (reg->empty_p ())
2391 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2392
9faf8348
DM
2393 check_region_for_read (reg, ctxt);
2394
2867118d
DM
2395 /* Special-case: handle var_decls in the constant pool. */
2396 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2397 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2398 return sval;
2399
808f4dfe
DM
2400 const svalue *sval
2401 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2402 if (sval)
757bf1df 2403 {
808f4dfe
DM
2404 if (reg->get_type ())
2405 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2406 return sval;
757bf1df 2407 }
757bf1df 2408
808f4dfe
DM
2409 /* Special-case: read at a constant index within a STRING_CST. */
2410 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2411 if (tree byte_offset_cst
2412 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2413 if (const string_region *str_reg
2414 = reg->get_parent_region ()->dyn_cast_string_region ())
757bf1df 2415 {
808f4dfe
DM
2416 tree string_cst = str_reg->get_string_cst ();
2417 if (const svalue *char_sval
2418 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2419 byte_offset_cst))
2420 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
757bf1df 2421 }
757bf1df 2422
808f4dfe
DM
2423 /* Special-case: read the initial char of a STRING_CST. */
2424 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2425 if (const string_region *str_reg
2426 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2427 {
2428 tree string_cst = str_reg->get_string_cst ();
2429 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2430 if (const svalue *char_sval
2431 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2432 byte_offset_cst))
2433 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2434 }
757bf1df 2435
808f4dfe
DM
2436 /* Otherwise we implicitly have the initial value of the region
2437 (if the cluster had been touched, binding_cluster::get_any_binding,
2438 would have returned UNKNOWN, and we would already have returned
2439 that above). */
757bf1df 2440
623bc027
DM
2441 /* Handle globals. */
2442 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2443 == RK_GLOBALS)
2444 return get_initial_value_for_global (reg);
757bf1df 2445
808f4dfe 2446 return m_mgr->get_or_create_initial_value (reg);
757bf1df
DM
2447}
2448
808f4dfe
DM
2449/* Return false if REG does not exist, true if it may do.
2450 This is for detecting regions within the stack that don't exist anymore
2451 after frames are popped. */
757bf1df 2452
808f4dfe
DM
2453bool
2454region_model::region_exists_p (const region *reg) const
757bf1df 2455{
808f4dfe
DM
2456 /* If within a stack frame, check that the stack frame is live. */
2457 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 2458 {
808f4dfe
DM
2459 /* Check that the current frame is the enclosing frame, or is called
2460 by it. */
2461 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2462 iter_frame = iter_frame->get_calling_frame ())
2463 if (iter_frame == enclosing_frame)
2464 return true;
2465 return false;
757bf1df 2466 }
808f4dfe
DM
2467
2468 return true;
757bf1df
DM
2469}
2470
808f4dfe
DM
2471/* Get a region for referencing PTR_SVAL, creating a region if need be, and
2472 potentially generating warnings via CTXT.
35e3f082 2473 PTR_SVAL must be of pointer type.
808f4dfe 2474 PTR_TREE if non-NULL can be used when emitting diagnostics. */
757bf1df 2475
808f4dfe
DM
2476const region *
2477region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
53cb324c 2478 region_model_context *ctxt) const
757bf1df 2479{
808f4dfe 2480 gcc_assert (ptr_sval);
35e3f082 2481 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
757bf1df 2482
49bfbf18
DM
2483 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2484 as a constraint. This suppresses false positives from
2485 -Wanalyzer-null-dereference for the case where we later have an
2486 if (PTR_SVAL) that would occur if we considered the false branch
2487 and transitioned the malloc state machine from start->null. */
2488 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2489 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2490 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2491
808f4dfe 2492 switch (ptr_sval->get_kind ())
757bf1df 2493 {
808f4dfe 2494 default:
23ebfda0 2495 break;
808f4dfe 2496
757bf1df
DM
2497 case SK_REGION:
2498 {
808f4dfe
DM
2499 const region_svalue *region_sval
2500 = as_a <const region_svalue *> (ptr_sval);
757bf1df
DM
2501 return region_sval->get_pointee ();
2502 }
2503
808f4dfe
DM
2504 case SK_BINOP:
2505 {
2506 const binop_svalue *binop_sval
2507 = as_a <const binop_svalue *> (ptr_sval);
2508 switch (binop_sval->get_op ())
2509 {
2510 case POINTER_PLUS_EXPR:
2511 {
2512 /* If we have a symbolic value expressing pointer arithmentic,
2513 try to convert it to a suitable region. */
2514 const region *parent_region
2515 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2516 const svalue *offset = binop_sval->get_arg1 ();
2517 tree type= TREE_TYPE (ptr_sval->get_type ());
2518 return m_mgr->get_offset_region (parent_region, type, offset);
2519 }
2520 default:
23ebfda0 2521 break;
808f4dfe
DM
2522 }
2523 }
23ebfda0 2524 break;
757bf1df
DM
2525
2526 case SK_POISONED:
2527 {
2528 if (ctxt)
808f4dfe
DM
2529 {
2530 tree ptr = get_representative_tree (ptr_sval);
2531 /* If we can't get a representative tree for PTR_SVAL
2532 (e.g. if it hasn't been bound into the store), then
2533 fall back on PTR_TREE, if non-NULL. */
2534 if (!ptr)
2535 ptr = ptr_tree;
2536 if (ptr)
2537 {
2538 const poisoned_svalue *poisoned_sval
2539 = as_a <const poisoned_svalue *> (ptr_sval);
2540 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
6341f14e 2541 ctxt->warn (make_unique<poisoned_value_diagnostic>
b03a10b0 2542 (ptr, pkind, NULL, NULL));
808f4dfe
DM
2543 }
2544 }
757bf1df 2545 }
23ebfda0 2546 break;
757bf1df
DM
2547 }
2548
23ebfda0 2549 return m_mgr->get_symbolic_region (ptr_sval);
757bf1df
DM
2550}
2551
d3b1ef7a
DM
2552/* Attempt to get BITS within any value of REG, as TYPE.
2553 In particular, extract values from compound_svalues for the case
2554 where there's a concrete binding at BITS.
9faf8348
DM
2555 Return an unknown svalue if we can't handle the given case.
2556 Use CTXT to report any warnings associated with reading from REG. */
d3b1ef7a
DM
2557
2558const svalue *
2559region_model::get_rvalue_for_bits (tree type,
2560 const region *reg,
9faf8348
DM
2561 const bit_range &bits,
2562 region_model_context *ctxt) const
d3b1ef7a 2563{
9faf8348 2564 const svalue *sval = get_store_value (reg, ctxt);
e61ffa20 2565 return m_mgr->get_or_create_bits_within (type, bits, sval);
d3b1ef7a
DM
2566}
2567
3175d40f
DM
2568/* A subclass of pending_diagnostic for complaining about writes to
2569 constant regions of memory. */
2570
2571class write_to_const_diagnostic
2572: public pending_diagnostic_subclass<write_to_const_diagnostic>
2573{
2574public:
2575 write_to_const_diagnostic (const region *reg, tree decl)
2576 : m_reg (reg), m_decl (decl)
2577 {}
2578
ff171cb1 2579 const char *get_kind () const final override
3175d40f
DM
2580 {
2581 return "write_to_const_diagnostic";
2582 }
2583
2584 bool operator== (const write_to_const_diagnostic &other) const
2585 {
2586 return (m_reg == other.m_reg
2587 && m_decl == other.m_decl);
2588 }
2589
ff171cb1 2590 int get_controlling_option () const final override
7fd6e36e
DM
2591 {
2592 return OPT_Wanalyzer_write_to_const;
2593 }
2594
ff171cb1 2595 bool emit (rich_location *rich_loc) final override
3175d40f 2596 {
111fd515
DM
2597 auto_diagnostic_group d;
2598 bool warned;
2599 switch (m_reg->get_kind ())
2600 {
2601 default:
7fd6e36e 2602 warned = warning_at (rich_loc, get_controlling_option (),
111fd515
DM
2603 "write to %<const%> object %qE", m_decl);
2604 break;
2605 case RK_FUNCTION:
7fd6e36e 2606 warned = warning_at (rich_loc, get_controlling_option (),
111fd515
DM
2607 "write to function %qE", m_decl);
2608 break;
2609 case RK_LABEL:
7fd6e36e 2610 warned = warning_at (rich_loc, get_controlling_option (),
111fd515
DM
2611 "write to label %qE", m_decl);
2612 break;
2613 }
3175d40f
DM
2614 if (warned)
2615 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2616 return warned;
2617 }
2618
ff171cb1 2619 label_text describe_final_event (const evdesc::final_event &ev) final override
3175d40f 2620 {
111fd515
DM
2621 switch (m_reg->get_kind ())
2622 {
2623 default:
2624 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2625 case RK_FUNCTION:
2626 return ev.formatted_print ("write to function %qE here", m_decl);
2627 case RK_LABEL:
2628 return ev.formatted_print ("write to label %qE here", m_decl);
2629 }
3175d40f
DM
2630 }
2631
2632private:
2633 const region *m_reg;
2634 tree m_decl;
2635};
2636
2637/* A subclass of pending_diagnostic for complaining about writes to
2638 string literals. */
2639
2640class write_to_string_literal_diagnostic
2641: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2642{
2643public:
2644 write_to_string_literal_diagnostic (const region *reg)
2645 : m_reg (reg)
2646 {}
2647
ff171cb1 2648 const char *get_kind () const final override
3175d40f
DM
2649 {
2650 return "write_to_string_literal_diagnostic";
2651 }
2652
2653 bool operator== (const write_to_string_literal_diagnostic &other) const
2654 {
2655 return m_reg == other.m_reg;
2656 }
2657
ff171cb1 2658 int get_controlling_option () const final override
7fd6e36e
DM
2659 {
2660 return OPT_Wanalyzer_write_to_string_literal;
2661 }
2662
ff171cb1 2663 bool emit (rich_location *rich_loc) final override
3175d40f 2664 {
7fd6e36e 2665 return warning_at (rich_loc, get_controlling_option (),
3175d40f
DM
2666 "write to string literal");
2667 /* Ideally we would show the location of the STRING_CST as well,
2668 but it is not available at this point. */
2669 }
2670
ff171cb1 2671 label_text describe_final_event (const evdesc::final_event &ev) final override
3175d40f
DM
2672 {
2673 return ev.formatted_print ("write to string literal here");
2674 }
2675
2676private:
2677 const region *m_reg;
2678};
2679
2680/* Use CTXT to warn If DEST_REG is a region that shouldn't be written to. */
2681
2682void
2683region_model::check_for_writable_region (const region* dest_reg,
2684 region_model_context *ctxt) const
2685{
2686 /* Fail gracefully if CTXT is NULL. */
2687 if (!ctxt)
2688 return;
2689
2690 const region *base_reg = dest_reg->get_base_region ();
2691 switch (base_reg->get_kind ())
2692 {
2693 default:
2694 break;
111fd515
DM
2695 case RK_FUNCTION:
2696 {
2697 const function_region *func_reg = as_a <const function_region *> (base_reg);
2698 tree fndecl = func_reg->get_fndecl ();
6341f14e
DM
2699 ctxt->warn (make_unique<write_to_const_diagnostic>
2700 (func_reg, fndecl));
111fd515
DM
2701 }
2702 break;
2703 case RK_LABEL:
2704 {
2705 const label_region *label_reg = as_a <const label_region *> (base_reg);
2706 tree label = label_reg->get_label ();
6341f14e
DM
2707 ctxt->warn (make_unique<write_to_const_diagnostic>
2708 (label_reg, label));
111fd515
DM
2709 }
2710 break;
3175d40f
DM
2711 case RK_DECL:
2712 {
2713 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2714 tree decl = decl_reg->get_decl ();
2715 /* Warn about writes to const globals.
2716 Don't warn for writes to const locals, and params in particular,
2717 since we would warn in push_frame when setting them up (e.g the
2718 "this" param is "T* const"). */
2719 if (TREE_READONLY (decl)
2720 && is_global_var (decl))
6341f14e 2721 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
3175d40f
DM
2722 }
2723 break;
2724 case RK_STRING:
6341f14e 2725 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
3175d40f
DM
2726 break;
2727 }
2728}
2729
9a2c9579
DM
2730/* Get the capacity of REG in bytes. */
2731
2732const svalue *
2733region_model::get_capacity (const region *reg) const
2734{
2735 switch (reg->get_kind ())
2736 {
2737 default:
2738 break;
2739 case RK_DECL:
2740 {
2741 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2742 tree decl = decl_reg->get_decl ();
2743 if (TREE_CODE (decl) == SSA_NAME)
2744 {
2745 tree type = TREE_TYPE (decl);
2746 tree size = TYPE_SIZE (type);
2747 return get_rvalue (size, NULL);
2748 }
2749 else
2750 {
2751 tree size = decl_init_size (decl, false);
2752 if (size)
2753 return get_rvalue (size, NULL);
2754 }
2755 }
2756 break;
e61ffa20
DM
2757 case RK_SIZED:
2758 /* Look through sized regions to get at the capacity
2759 of the underlying regions. */
2760 return get_capacity (reg->get_parent_region ());
9a2c9579
DM
2761 }
2762
2763 if (const svalue *recorded = get_dynamic_extents (reg))
2764 return recorded;
2765
2766 return m_mgr->get_or_create_unknown_svalue (sizetype);
2767}
2768
0a9c0d4a
TL
2769/* Return the string size, including the 0-terminator, if SVAL is a
2770 constant_svalue holding a string. Otherwise, return an unknown_svalue. */
2771
2772const svalue *
2773region_model::get_string_size (const svalue *sval) const
2774{
2775 tree cst = sval->maybe_get_constant ();
2776 if (!cst || TREE_CODE (cst) != STRING_CST)
2777 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2778
2779 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2780 return m_mgr->get_or_create_constant_svalue (out);
2781}
2782
2783/* Return the string size, including the 0-terminator, if REG is a
2784 string_region. Otherwise, return an unknown_svalue. */
2785
2786const svalue *
2787region_model::get_string_size (const region *reg) const
2788{
2789 const string_region *str_reg = dyn_cast <const string_region *> (reg);
2790 if (!str_reg)
2791 return m_mgr->get_or_create_unknown_svalue (size_type_node);
2792
2793 tree cst = str_reg->get_string_cst ();
2794 tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
2795 return m_mgr->get_or_create_constant_svalue (out);
2796}
2797
9faf8348
DM
2798/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
2799 using DIR to determine if this access is a read or write. */
2800
2801void
2802region_model::check_region_access (const region *reg,
2803 enum access_direction dir,
2804 region_model_context *ctxt) const
2805{
2806 /* Fail gracefully if CTXT is NULL. */
2807 if (!ctxt)
2808 return;
2809
b9365b93 2810 check_region_for_taint (reg, dir, ctxt);
7e3b45be 2811 check_region_bounds (reg, dir, ctxt);
b9365b93 2812
9faf8348
DM
2813 switch (dir)
2814 {
2815 default:
2816 gcc_unreachable ();
2817 case DIR_READ:
2818 /* Currently a no-op. */
2819 break;
2820 case DIR_WRITE:
2821 check_for_writable_region (reg, ctxt);
2822 break;
2823 }
2824}
2825
2826/* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2827
2828void
2829region_model::check_region_for_write (const region *dest_reg,
2830 region_model_context *ctxt) const
2831{
2832 check_region_access (dest_reg, DIR_WRITE, ctxt);
2833}
2834
2835/* If CTXT is non-NULL, use it to warn about any problems reading from REG. */
2836
2837void
2838region_model::check_region_for_read (const region *src_reg,
2839 region_model_context *ctxt) const
2840{
2841 check_region_access (src_reg, DIR_READ, ctxt);
2842}
2843
/* Concrete subclass for casts of pointers that lead to trailing bytes.
   Reports -Wanalyzer-allocation-size (CWE-131) when a buffer whose size
   is not a multiple of the pointee's size is assigned to a pointer.  */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  /* Ctor for the case where no expression describing the capacity is
     available.  */
  dubious_allocation_size (const region *lhs, const region *rhs)
  : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE),
    m_has_allocation_event (false)
  {}

  /* Ctor with EXPR, an expression describing the allocated capacity.  */
  dubious_allocation_size (const region *lhs, const region *rhs,
			   tree expr)
  : m_lhs (lhs), m_rhs (rhs), m_expr (expr),
    m_has_allocation_event (false)
  {}

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  /* Deduplication: two instances are "the same" if they refer to the
     same regions and the same capacity expression.  */
  bool operator== (const dubious_allocation_size &other) const
  {
    return m_lhs == other.m_lhs && m_rhs == other.m_rhs
	   && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    /* CWE-131: Incorrect Calculation of Buffer Size.  */
    m.add_cwe (131);

    return warning_meta (rich_loc, m, get_controlling_option (),
			 "allocated buffer size is not a multiple"
			 " of the pointee's size");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    /* If an allocation event was already added to the path, the size
       was described there; just describe the assignment.  */
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	/* Use %E for literal byte counts, %qE for symbolic ones.  */
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
      }

    /* No capacity expression available at all.  */
    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  /* Add an event describing the allocation's capacity, and record that
     we did so (see describe_final_event above).  */
  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

private:
  /* The region assigned to; its type is the pointer type.  */
  const region *m_lhs;
  /* The region whose capacity is dubious.  */
  const region *m_rhs;
  /* Expression describing the capacity, or NULL_TREE if unavailable.  */
  const tree m_expr;
  /* Whether add_region_creation_events has been called.  */
  bool m_has_allocation_event;
};
2941
2942/* Return true on dubious allocation sizes for constant sizes. */
2943
2944static bool
2945capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2946 bool is_struct)
2947{
2948 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2949 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
2950
2951 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
2952 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
2953
2954 if (is_struct)
b4cc945c 2955 return alloc_size == 0 || alloc_size >= pointee_size;
e6c3bb37
TL
2956 return alloc_size % pointee_size == 0;
2957}
2958
2959static bool
2960capacity_compatible_with_type (tree cst, tree pointee_size_tree)
2961{
2962 return capacity_compatible_with_type (cst, pointee_size_tree, false);
2963}
2964
/* Checks whether SVAL could be a multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that might be a multiple of SIZE_CST to the RESULT_SET.  */

class size_visitor : public visitor
{
public:
  /* The ctor immediately runs the traversal on ROOT_SVAL; the visit_*
     callbacks below populate result_set.  CM is used to look up
     constant constraints on conjured svalues.  */
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  /* Return true if the root svalue could be a multiple of m_size_cst.  */
  bool get_result ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  /* Unknown values could be anything, including a multiple.  */
  void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* Likewise for poisoned values.  */
  void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* A unary op (e.g. a cast) is a multiple iff its operand is.  */
  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    const svalue *arg = sval->get_arg ();
    if (result_set.contains (arg))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    if (sval->get_op () == MULT_EXPR)
      {
	/* For a product, either factor being a multiple suffices.  */
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
      }
    else
      {
	/* For other binary ops, require both operands to be multiples.  */
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
      }
  }

  /* A repetition is a multiple iff the repeated value is.  */
  void visit_repeated_svalue (const repeated_svalue *sval) final override
  {
    sval->get_inner_svalue ()->accept (this);
    if (result_set.contains (sval->get_inner_svalue ()))
      result_set.add (sval);
  }

  /* Unwrap unmergeable svalues: classify the wrapped value.  */
  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    sval->get_arg ()->accept (this);
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  /* A widened value is a multiple iff both the base and the iterated
     value are.  */
  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) && result_set.contains (iter))
      result_set.add (sval);
  }

  /* For a conjured svalue, consult the constraint manager: if it is
     known to equal a constant, check that constant; otherwise assume
     it might be a multiple.  */
  void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    equiv_class_id id (-1);
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
	else
	  result_set.add (sval);
      }
  }

  /* Asm outputs are opaque: assume they might be a multiple.  */
  void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  /* Likewise for results of const functions.  */
  void visit_const_fn_result_svalue (const const_fn_result_svalue
				      *sval ATTRIBUTE_UNUSED) final override
  {
    result_set.add (sval);
  }

private:
  /* Add SVAL to the result set if the constant CST is (or might be)
     a multiple of m_size_cst.  */
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	result_set.add (sval);
	break;
      case INTEGER_CST:
	if (capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  /* The pointee size to test against; an INTEGER_CST.  */
  tree m_size_cst;
  /* The svalue being classified.  */
  const svalue *m_root_sval;
  /* Constraints in effect, for conjured svalues.  */
  constraint_manager *m_cm;
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};
3095
3096/* Return true if a struct or union either uses the inheritance pattern,
3097 where the first field is a base struct, or the flexible array member
3098 pattern, where the last field is an array without a specified size. */
3099
3100static bool
3101struct_or_union_with_inheritance_p (tree struc)
3102{
3103 tree iter = TYPE_FIELDS (struc);
3104 if (iter == NULL_TREE)
3105 return false;
3106 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3107 return true;
3108
3109 tree last_field;
3110 while (iter != NULL_TREE)
3111 {
3112 last_field = iter;
3113 iter = DECL_CHAIN (iter);
3114 }
3115
3116 if (last_field != NULL_TREE
3117 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3118 return true;
3119
3120 return false;
3121}
3122
3123/* Return true if the lhs and rhs of an assignment have different types. */
3124
3125static bool
3126is_any_cast_p (const gimple *stmt)
3127{
c83e9731 3128 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
e6c3bb37
TL
3129 return gimple_assign_cast_p (assign)
3130 || !pending_diagnostic::same_tree_p (
3131 TREE_TYPE (gimple_assign_lhs (assign)),
3132 TREE_TYPE (gimple_assign_rhs1 (assign)));
c83e9731 3133 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
e6c3bb37
TL
3134 {
3135 tree lhs = gimple_call_lhs (call);
3136 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3137 TREE_TYPE (gimple_call_lhs (call)),
3138 gimple_call_return_type (call));
3139 }
3140
3141 return false;
3142}
3143
/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  /* Need both a context (to warn through) and a statement (to classify
     the assignment as a cast).  */
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  /* Only pointers to concrete regions can be checked.  */
  const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
  if (!reg_sval)
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand size actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = reg_sval->get_pointee ();
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	/* Concrete capacity: check divisibility directly.  */
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							     cst_cap));
      }
      break;
    default:
      {
	/* Symbolic capacity: only warn for non-struct pointees, and only
	   if the visitor can prove the capacity is never a multiple of
	   the pointee size.  */
	if (!is_struct)
	  {
	    size_visitor v (pointee_size_tree, capacity, m_constraints);
	    if (!v.get_result ())
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
								   rhs_reg,
								   expr));
	      }
	  }
	break;
      }
    }
}
3219
808f4dfe 3220/* Set the value of the region given by LHS_REG to the value given
9faf8348
DM
3221 by RHS_SVAL.
3222 Use CTXT to report any warnings associated with writing to LHS_REG. */
757bf1df 3223
808f4dfe
DM
3224void
3225region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3175d40f 3226 region_model_context *ctxt)
757bf1df 3227{
808f4dfe
DM
3228 gcc_assert (lhs_reg);
3229 gcc_assert (rhs_sval);
3230
dfe2ef7f
DM
3231 /* Setting the value of an empty region is a no-op. */
3232 if (lhs_reg->empty_p ())
3233 return;
3234
e6c3bb37
TL
3235 check_region_size (lhs_reg, rhs_sval, ctxt);
3236
9faf8348 3237 check_region_for_write (lhs_reg, ctxt);
3175d40f 3238
808f4dfe 3239 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
e61ffa20 3240 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
3241}
3242
808f4dfe 3243/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
3244
3245void
808f4dfe 3246region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 3247{
808f4dfe
DM
3248 const region *lhs_reg = get_lvalue (lhs, ctxt);
3249 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3250 gcc_assert (lhs_reg);
3251 gcc_assert (rhs_sval);
3252 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
3253}
3254
808f4dfe 3255/* Remove all bindings overlapping REG within the store. */
884d9141
DM
3256
3257void
808f4dfe
DM
3258region_model::clobber_region (const region *reg)
3259{
3260 m_store.clobber_region (m_mgr->get_store_manager(), reg);
3261}
3262
3263/* Remove any bindings for REG within the store. */
3264
3265void
3266region_model::purge_region (const region *reg)
3267{
3268 m_store.purge_region (m_mgr->get_store_manager(), reg);
3269}
3270
e61ffa20
DM
3271/* Fill REG with SVAL. */
3272
3273void
3274region_model::fill_region (const region *reg, const svalue *sval)
3275{
3276 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
3277}
3278
808f4dfe
DM
3279/* Zero-fill REG. */
3280
3281void
3282region_model::zero_fill_region (const region *reg)
3283{
3284 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
3285}
3286
3287/* Mark REG as having unknown content. */
3288
3289void
3a66c289
DM
3290region_model::mark_region_as_unknown (const region *reg,
3291 uncertainty_t *uncertainty)
884d9141 3292{
3a66c289
DM
3293 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
3294 uncertainty);
884d9141
DM
3295}
3296
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.
   This works through a cascade of special cases on the svalue kinds,
   falling back to the constraint manager at the end.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* See what we know based on the values.  */

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    {
      /* If we have a pair of constants, compare them.  */
      if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
	return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
      else
	{
	  /* When we have one constant, put it on the RHS.  */
	  std::swap (lhs, rhs);
	  op = swap_tree_comparison (op);
	}
    }
  /* The swap above guarantees any remaining constant is on the RHS.  */
  gcc_assert (lhs->get_kind () != SK_CONSTANT);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practise and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
	else if (const unaryop_svalue *unaryop
		   = lhs->dyn_cast_unaryop_svalue ())
	  {
	    if (unaryop->get_op () == NEGATE_EXPR)
	      {
		/* e.g. "-X <= 0" is equivalent to X >= 0".  */
		tristate lhs_ts = eval_condition (unaryop->get_arg (),
						  swap_tree_comparison (op),
						  rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* Widening svalues vs constants can sometimes be decided without
     the constraint manager.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
	{
	default:
	  break;
	case EQ_EXPR:
	  {
	    /* TODO: binops can be equal even if they are not structurally
	       equal in case of commutative operators.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case LE_EXPR:
	  {
	    /* Structural equality implies LHS <= RHS.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GE_EXPR:
	  {
	    /* Structural equality or symbolic dominance implies
	       LHS >= RHS.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	    res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GT_EXPR:
	  {
	    tristate res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	}
    }

  /* Otherwise, try constraints.
     Cast to const to ensure we don't change the constraint_manager as we
     do this (e.g. by creating equivalence classes).  */
  const constraint_manager *constraints = m_constraints;
  return constraints->eval_condition (lhs, op, rhs);
}
3503
9bbcee45 3504/* Subroutine of region_model::eval_condition, for rejecting
808f4dfe
DM
3505 equality of INIT_VAL(PARM) with &LOCAL. */
3506
3507tristate
3508region_model::compare_initial_and_pointer (const initial_svalue *init,
3509 const region_svalue *ptr) const
3510{
3511 const region *pointee = ptr->get_pointee ();
3512
3513 /* If we have a pointer to something within a stack frame, it can't be the
3514 initial value of a param. */
3515 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
3516 if (init->initial_value_of_param_p ())
3517 return tristate::TS_FALSE;
757bf1df
DM
3518
3519 return tristate::TS_UNKNOWN;
3520}
3521
7a6564c9
TL
3522/* Return true if SVAL is definitely positive. */
3523
3524static bool
3525is_positive_svalue (const svalue *sval)
3526{
3527 if (tree cst = sval->maybe_get_constant ())
3528 return !zerop (cst) && get_range_pos_neg (cst) == 1;
3529 tree type = sval->get_type ();
3530 if (!type)
3531 return false;
3532 /* Consider a binary operation size_t + int. The analyzer wraps the int in
3533 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
3534 the result is smaller than the first operand. Thus, we have to look if
3535 the argument of the unaryop_svalue is also positive. */
3536 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
3537 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
3538 && is_positive_svalue (un_op->get_arg ());
3539 return TYPE_UNSIGNED (type);
3540}
3541
3542/* Return true if A is definitely larger than B.
3543
3544 Limitation: does not account for integer overflows and does not try to
3545 return false, so it can not be used negated. */
3546
3547tristate
3548region_model::symbolic_greater_than (const binop_svalue *bin_a,
3549 const svalue *b) const
3550{
3551 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
3552 {
3553 /* Eliminate the right-hand side of both svalues. */
3554 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
3555 if (bin_a->get_op () == bin_b->get_op ()
9bbcee45
DM
3556 && eval_condition (bin_a->get_arg1 (),
3557 GT_EXPR,
3558 bin_b->get_arg1 ()).is_true ()
3559 && eval_condition (bin_a->get_arg0 (),
3560 GE_EXPR,
3561 bin_b->get_arg0 ()).is_true ())
7a6564c9
TL
3562 return tristate (tristate::TS_TRUE);
3563
3564 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
3565 if (is_positive_svalue (bin_a->get_arg1 ())
9bbcee45
DM
3566 && eval_condition (bin_a->get_arg0 (),
3567 GE_EXPR, b).is_true ())
7a6564c9
TL
3568 return tristate (tristate::TS_TRUE);
3569 }
3570 return tristate::unknown ();
3571}
3572
/* Return true if A and B are equal structurally.

   Structural equality means that A and B are equal if the svalues A and B have
   the same nodes at the same positions in the tree and the leafs are equal.
   Equality for conjured_svalues and initial_svalues is determined by comparing
   the pointers while constants are compared by value.  That behavior is useful
   to check for binop_svalues that evaluate to the same concrete value but
   might use one operand with a different type but the same constant value.

   For example,
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var')),
		   constant_svalue (‘size_t’, 4))
   and
     binop_svalue (mult_expr,
		   initial_svalue (‘size_t’, decl_region (..., 'some_var'),
		   constant_svalue (‘sizetype’, 4))
   are structurally equal.  A concrete C code example, where this occurs, can
   be found in test7 of out-of-bounds-5.c.  */

tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	/* Constants are compared by value, ignoring type.  */
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	/* Unary ops must agree on type, operation, and operand.  */
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	/* Binary ops must agree on operation and both operands
	   (recursively).  */
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}
3638
/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNEROP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradicts existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  /* Strip any casts around the binop.  */
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  /* Only comparisons against zero are handled.  */
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}
3755
757bf1df
DM
3756/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3757 If it is consistent with existing constraints, add it, and return true.
3758 Return false if it contradicts existing constraints.
3759 Use CTXT for reporting any diagnostics associated with the accesses. */
3760
3761bool
3762region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3763 region_model_context *ctxt)
3764{
e978955d
DM
3765 /* For now, make no attempt to capture constraints on floating-point
3766 values. */
3767 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3768 return true;
3769
808f4dfe
DM
3770 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
3771 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 3772
48e8a7a6
DM
3773 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
3774}
3775
3776/* Attempt to add the constraint "LHS OP RHS" to this region_model.
3777 If it is consistent with existing constraints, add it, and return true.
3778 Return false if it contradicts existing constraints.
3779 Use CTXT for reporting any diagnostics associated with the accesses. */
3780
3781bool
3782region_model::add_constraint (const svalue *lhs,
3783 enum tree_code op,
3784 const svalue *rhs,
3785 region_model_context *ctxt)
3786{
3787 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
3788
3789 /* If we already have the condition, do nothing. */
3790 if (t_cond.is_true ())
3791 return true;
3792
3793 /* Reject a constraint that would contradict existing knowledge, as
3794 unsatisfiable. */
3795 if (t_cond.is_false ())
3796 return false;
3797
48e8a7a6
DM
3798 bool out;
3799 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
3800 return out;
757bf1df 3801
c4b8f373
DM
3802 /* Attempt to store the constraint. */
3803 if (!m_constraints->add_constraint (lhs, op, rhs))
3804 return false;
757bf1df
DM
3805
3806 /* Notify the context, if any. This exists so that the state machines
3807 in a program_state can be notified about the condition, and so can
3808 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
3809 when synthesizing constraints as above. */
3810 if (ctxt)
3811 ctxt->on_condition (lhs, op, rhs);
3812
9a2c9579
DM
3813 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
3814 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
3815 if (tree rhs_cst = rhs->maybe_get_constant ())
3816 if (op == EQ_EXPR && zerop (rhs_cst))
3817 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
3818 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 3819
757bf1df
DM
3820 return true;
3821}
3822
84fb3546
DM
3823/* As above, but when returning false, if OUT is non-NULL, write a
3824 new rejected_constraint to *OUT. */
3825
3826bool
3827region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
3828 region_model_context *ctxt,
3829 rejected_constraint **out)
3830{
3831 bool sat = add_constraint (lhs, op, rhs, ctxt);
3832 if (!sat && out)
8ca7fa84 3833 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
3834 return sat;
3835}
3836
757bf1df
DM
3837/* Determine what is known about the condition "LHS OP RHS" within
3838 this model.
3839 Use CTXT for reporting any diagnostics associated with the accesses. */
3840
3841tristate
3842region_model::eval_condition (tree lhs,
3843 enum tree_code op,
3844 tree rhs,
5c6546ca 3845 region_model_context *ctxt) const
757bf1df 3846{
e978955d
DM
3847 /* For now, make no attempt to model constraints on floating-point
3848 values. */
3849 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
3850 return tristate::unknown ();
3851
757bf1df
DM
3852 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
3853}
3854
467a4820
DM
3855/* Implementation of region_model::get_representative_path_var.
3856 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
3857 Use VISITED to prevent infinite mutual recursion with the overload for
3858 regions. */
757bf1df 3859
808f4dfe 3860path_var
467a4820
DM
3861region_model::get_representative_path_var_1 (const svalue *sval,
3862 svalue_set *visited) const
757bf1df 3863{
467a4820 3864 gcc_assert (sval);
757bf1df 3865
808f4dfe
DM
3866 /* Prevent infinite recursion. */
3867 if (visited->contains (sval))
3868 return path_var (NULL_TREE, 0);
3869 visited->add (sval);
757bf1df 3870
467a4820
DM
3871 /* Handle casts by recursion into get_representative_path_var. */
3872 if (const svalue *cast_sval = sval->maybe_undo_cast ())
3873 {
3874 path_var result = get_representative_path_var (cast_sval, visited);
3875 tree orig_type = sval->get_type ();
3876 /* If necessary, wrap the result in a cast. */
3877 if (result.m_tree && orig_type)
3878 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
3879 return result;
3880 }
3881
808f4dfe
DM
3882 auto_vec<path_var> pvs;
3883 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 3884
808f4dfe
DM
3885 if (tree cst = sval->maybe_get_constant ())
3886 pvs.safe_push (path_var (cst, 0));
757bf1df 3887
90f7c300 3888 /* Handle string literals and various other pointers. */
808f4dfe
DM
3889 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
3890 {
3891 const region *reg = ptr_sval->get_pointee ();
3892 if (path_var pv = get_representative_path_var (reg, visited))
3893 return path_var (build1 (ADDR_EXPR,
467a4820 3894 sval->get_type (),
808f4dfe
DM
3895 pv.m_tree),
3896 pv.m_stack_depth);
3897 }
3898
3899 /* If we have a sub_svalue, look for ways to represent the parent. */
3900 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 3901 {
808f4dfe
DM
3902 const svalue *parent_sval = sub_sval->get_parent ();
3903 const region *subreg = sub_sval->get_subregion ();
3904 if (path_var parent_pv
3905 = get_representative_path_var (parent_sval, visited))
3906 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
3907 return path_var (build3 (COMPONENT_REF,
3908 sval->get_type (),
3909 parent_pv.m_tree,
3910 field_reg->get_field (),
3911 NULL_TREE),
3912 parent_pv.m_stack_depth);
90f7c300
DM
3913 }
3914
b9365b93
DM
3915 /* Handle binops. */
3916 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
3917 if (path_var lhs_pv
3918 = get_representative_path_var (binop_sval->get_arg0 (), visited))
3919 if (path_var rhs_pv
3920 = get_representative_path_var (binop_sval->get_arg1 (), visited))
3921 return path_var (build2 (binop_sval->get_op (),
3922 sval->get_type (),
3923 lhs_pv.m_tree, rhs_pv.m_tree),
3924 lhs_pv.m_stack_depth);
3925
808f4dfe
DM
3926 if (pvs.length () < 1)
3927 return path_var (NULL_TREE, 0);
3928
3929 pvs.qsort (readability_comparator);
3930 return pvs[0];
757bf1df
DM
3931}
3932
467a4820
DM
3933/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
3934 Use VISITED to prevent infinite mutual recursion with the overload for
3935 regions
3936
3937 This function defers to get_representative_path_var_1 to do the work;
3938 it adds verification that get_representative_path_var_1 returned a tree
3939 of the correct type. */
3940
3941path_var
3942region_model::get_representative_path_var (const svalue *sval,
3943 svalue_set *visited) const
3944{
3945 if (sval == NULL)
3946 return path_var (NULL_TREE, 0);
3947
3948 tree orig_type = sval->get_type ();
3949
3950 path_var result = get_representative_path_var_1 (sval, visited);
3951
3952 /* Verify that the result has the same type as SVAL, if any. */
3953 if (result.m_tree && orig_type)
3954 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
3955
3956 return result;
3957}
3958
3959/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
3960
3961 Strip off any top-level cast, to avoid messages like
3962 double-free of '(void *)ptr'
3963 from analyzer diagnostics. */
757bf1df 3964
808f4dfe
DM
3965tree
3966region_model::get_representative_tree (const svalue *sval) const
757bf1df 3967{
808f4dfe 3968 svalue_set visited;
467a4820
DM
3969 tree expr = get_representative_path_var (sval, &visited).m_tree;
3970
3971 /* Strip off any top-level cast. */
7e3b45be
TL
3972 if (expr && TREE_CODE (expr) == NOP_EXPR)
3973 expr = TREE_OPERAND (expr, 0);
3974
3975 return fixup_tree_for_diagnostic (expr);
3976}
3977
3978tree
3979region_model::get_representative_tree (const region *reg) const
3980{
3981 svalue_set visited;
3982 tree expr = get_representative_path_var (reg, &visited).m_tree;
3983
3984 /* Strip off any top-level cast. */
467a4820 3985 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 3986 expr = TREE_OPERAND (expr, 0);
467a4820 3987
e4bb1bd6 3988 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
3989}
3990
467a4820
DM
3991/* Implementation of region_model::get_representative_path_var.
3992
3993 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
3994 the NULL path_var.
3995 For example, a region for a field of a local would be a path_var
3996 wrapping a COMPONENT_REF.
3997 Use VISITED to prevent infinite mutual recursion with the overload for
3998 svalues. */
757bf1df 3999
808f4dfe 4000path_var
467a4820
DM
4001region_model::get_representative_path_var_1 (const region *reg,
4002 svalue_set *visited) const
808f4dfe
DM
4003{
4004 switch (reg->get_kind ())
757bf1df 4005 {
808f4dfe
DM
4006 default:
4007 gcc_unreachable ();
e516294a 4008
808f4dfe
DM
4009 case RK_FRAME:
4010 case RK_GLOBALS:
4011 case RK_CODE:
4012 case RK_HEAP:
4013 case RK_STACK:
358dab90 4014 case RK_THREAD_LOCAL:
808f4dfe
DM
4015 case RK_ROOT:
4016 /* Regions that represent memory spaces are not expressible as trees. */
4017 return path_var (NULL_TREE, 0);
757bf1df 4018
808f4dfe 4019 case RK_FUNCTION:
884d9141 4020 {
808f4dfe
DM
4021 const function_region *function_reg
4022 = as_a <const function_region *> (reg);
4023 return path_var (function_reg->get_fndecl (), 0);
884d9141 4024 }
808f4dfe 4025 case RK_LABEL:
9e78634c
DM
4026 {
4027 const label_region *label_reg = as_a <const label_region *> (reg);
4028 return path_var (label_reg->get_label (), 0);
4029 }
90f7c300 4030
808f4dfe
DM
4031 case RK_SYMBOLIC:
4032 {
4033 const symbolic_region *symbolic_reg
4034 = as_a <const symbolic_region *> (reg);
4035 const svalue *pointer = symbolic_reg->get_pointer ();
4036 path_var pointer_pv = get_representative_path_var (pointer, visited);
4037 if (!pointer_pv)
4038 return path_var (NULL_TREE, 0);
4039 tree offset = build_int_cst (pointer->get_type (), 0);
4040 return path_var (build2 (MEM_REF,
4041 reg->get_type (),
4042 pointer_pv.m_tree,
4043 offset),
4044 pointer_pv.m_stack_depth);
4045 }
4046 case RK_DECL:
4047 {
4048 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4049 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4050 }
4051 case RK_FIELD:
4052 {
4053 const field_region *field_reg = as_a <const field_region *> (reg);
4054 path_var parent_pv
4055 = get_representative_path_var (reg->get_parent_region (), visited);
4056 if (!parent_pv)
4057 return path_var (NULL_TREE, 0);
4058 return path_var (build3 (COMPONENT_REF,
4059 reg->get_type (),
4060 parent_pv.m_tree,
4061 field_reg->get_field (),
4062 NULL_TREE),
4063 parent_pv.m_stack_depth);
4064 }
757bf1df 4065
808f4dfe
DM
4066 case RK_ELEMENT:
4067 {
4068 const element_region *element_reg
4069 = as_a <const element_region *> (reg);
4070 path_var parent_pv
4071 = get_representative_path_var (reg->get_parent_region (), visited);
4072 if (!parent_pv)
4073 return path_var (NULL_TREE, 0);
4074 path_var index_pv
4075 = get_representative_path_var (element_reg->get_index (), visited);
4076 if (!index_pv)
4077 return path_var (NULL_TREE, 0);
4078 return path_var (build4 (ARRAY_REF,
4079 reg->get_type (),
4080 parent_pv.m_tree, index_pv.m_tree,
4081 NULL_TREE, NULL_TREE),
4082 parent_pv.m_stack_depth);
4083 }
757bf1df 4084
808f4dfe 4085 case RK_OFFSET:
757bf1df 4086 {
808f4dfe
DM
4087 const offset_region *offset_reg
4088 = as_a <const offset_region *> (reg);
4089 path_var parent_pv
4090 = get_representative_path_var (reg->get_parent_region (), visited);
4091 if (!parent_pv)
4092 return path_var (NULL_TREE, 0);
4093 path_var offset_pv
4094 = get_representative_path_var (offset_reg->get_byte_offset (),
4095 visited);
29f5db8e 4096 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 4097 return path_var (NULL_TREE, 0);
29f5db8e
DM
4098 tree addr_parent = build1 (ADDR_EXPR,
4099 build_pointer_type (reg->get_type ()),
4100 parent_pv.m_tree);
808f4dfe
DM
4101 return path_var (build2 (MEM_REF,
4102 reg->get_type (),
29f5db8e 4103 addr_parent, offset_pv.m_tree),
808f4dfe 4104 parent_pv.m_stack_depth);
757bf1df 4105 }
757bf1df 4106
e61ffa20
DM
4107 case RK_SIZED:
4108 return path_var (NULL_TREE, 0);
4109
808f4dfe
DM
4110 case RK_CAST:
4111 {
4112 path_var parent_pv
4113 = get_representative_path_var (reg->get_parent_region (), visited);
4114 if (!parent_pv)
4115 return path_var (NULL_TREE, 0);
4116 return path_var (build1 (NOP_EXPR,
4117 reg->get_type (),
4118 parent_pv.m_tree),
4119 parent_pv.m_stack_depth);
4120 }
757bf1df 4121
808f4dfe
DM
4122 case RK_HEAP_ALLOCATED:
4123 case RK_ALLOCA:
4124 /* No good way to express heap-allocated/alloca regions as trees. */
4125 return path_var (NULL_TREE, 0);
757bf1df 4126
808f4dfe
DM
4127 case RK_STRING:
4128 {
4129 const string_region *string_reg = as_a <const string_region *> (reg);
4130 return path_var (string_reg->get_string_cst (), 0);
4131 }
757bf1df 4132
2402dc6b 4133 case RK_VAR_ARG:
358dab90 4134 case RK_ERRNO:
808f4dfe
DM
4135 case RK_UNKNOWN:
4136 return path_var (NULL_TREE, 0);
4137 }
757bf1df
DM
4138}
4139
467a4820
DM
4140/* Attempt to return a path_var that represents REG, or return
4141 the NULL path_var.
4142 For example, a region for a field of a local would be a path_var
4143 wrapping a COMPONENT_REF.
4144 Use VISITED to prevent infinite mutual recursion with the overload for
4145 svalues.
4146
4147 This function defers to get_representative_path_var_1 to do the work;
4148 it adds verification that get_representative_path_var_1 returned a tree
4149 of the correct type. */
4150
4151path_var
4152region_model::get_representative_path_var (const region *reg,
4153 svalue_set *visited) const
4154{
4155 path_var result = get_representative_path_var_1 (reg, visited);
4156
4157 /* Verify that the result has the same type as REG, if any. */
4158 if (result.m_tree && reg->get_type ())
4159 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4160
4161 return result;
4162}
4163
757bf1df
DM
4164/* Update this model for any phis in SNODE, assuming we came from
4165 LAST_CFG_SUPEREDGE. */
4166
4167void
4168region_model::update_for_phis (const supernode *snode,
4169 const cfg_superedge *last_cfg_superedge,
4170 region_model_context *ctxt)
4171{
4172 gcc_assert (last_cfg_superedge);
4173
e0a7a675
DM
4174 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4175 are effectively handled simultaneously. */
4176 const region_model old_state (*this);
4177
757bf1df
DM
4178 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4179 !gsi_end_p (gpi); gsi_next (&gpi))
4180 {
4181 gphi *phi = gpi.phi ();
4182
4183 tree src = last_cfg_superedge->get_phi_arg (phi);
4184 tree lhs = gimple_phi_result (phi);
4185
e0a7a675
DM
4186 /* Update next_state based on phi and old_state. */
4187 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
4188 }
4189}
4190
4191/* Attempt to update this model for taking EDGE (where the last statement
4192 was LAST_STMT), returning true if the edge can be taken, false
4193 otherwise.
84fb3546
DM
4194 When returning false, if OUT is non-NULL, write a new rejected_constraint
4195 to it.
757bf1df
DM
4196
4197 For CFG superedges where LAST_STMT is a conditional or a switch
4198 statement, attempt to add the relevant conditions for EDGE to this
4199 model, returning true if they are feasible, or false if they are
4200 impossible.
4201
4202 For call superedges, push frame information and store arguments
4203 into parameters.
4204
4205 For return superedges, pop frame information and store return
4206 values into any lhs.
4207
4208 Rejection of call/return superedges happens elsewhere, in
4209 program_point::on_edge (i.e. based on program point, rather
4210 than program state). */
4211
4212bool
4213region_model::maybe_update_for_edge (const superedge &edge,
4214 const gimple *last_stmt,
84fb3546
DM
4215 region_model_context *ctxt,
4216 rejected_constraint **out)
757bf1df
DM
4217{
4218 /* Handle frame updates for interprocedural edges. */
4219 switch (edge.m_kind)
4220 {
4221 default:
4222 break;
4223
4224 case SUPEREDGE_CALL:
4225 {
4226 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4227 update_for_call_superedge (*call_edge, ctxt);
4228 }
4229 break;
4230
4231 case SUPEREDGE_RETURN:
4232 {
4233 const return_superedge *return_edge
4234 = as_a <const return_superedge *> (&edge);
4235 update_for_return_superedge (*return_edge, ctxt);
4236 }
4237 break;
4238
4239 case SUPEREDGE_INTRAPROCEDURAL_CALL:
bfca9505
DM
4240 /* This is a no-op for call summaries; we should already
4241 have handled the effect of the call summary at the call stmt. */
757bf1df
DM
4242 break;
4243 }
4244
4245 if (last_stmt == NULL)
4246 return true;
4247
4248 /* Apply any constraints for conditionals/switch statements. */
4249
4250 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4251 {
4252 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 4253 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
4254 }
4255
4256 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4257 {
4258 const switch_cfg_superedge *switch_sedge
4259 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
4260 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4261 ctxt, out);
757bf1df
DM
4262 }
4263
1690a839
DM
4264 /* Apply any constraints due to an exception being thrown. */
4265 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4266 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 4267 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 4268
757bf1df
DM
4269 return true;
4270}
4271
4272/* Push a new frame_region on to the stack region.
4273 Populate the frame_region with child regions for the function call's
4274 parameters, using values from the arguments at the callsite in the
4275 caller's frame. */
4276
4277void
aef703cf 4278region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
4279 region_model_context *ctxt,
4280 function *callee)
757bf1df 4281{
808f4dfe 4282 /* Build a vec of argument svalues, using the current top
757bf1df 4283 frame for resolving tree expressions. */
808f4dfe 4284 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
4285
4286 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4287 {
4288 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 4289 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
4290 }
4291
e92d0ff6
AS
4292 if(!callee)
4293 {
4294 /* Get the function * from the gcall. */
4295 tree fn_decl = get_fndecl_for_call (call_stmt,ctxt);
4296 callee = DECL_STRUCT_FUNCTION (fn_decl);
4297 }
4298
4299 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
4300}
4301
a96f1c38
DM
4302/* Pop the top-most frame_region from the stack, and copy the return
4303 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 4304 the call (if any). */
aef703cf 4305
757bf1df 4306void
aef703cf
AS
4307region_model::update_for_return_gcall (const gcall *call_stmt,
4308 region_model_context *ctxt)
757bf1df 4309{
4cebae09
DM
4310 /* Get the lvalue for the result of the call, passing it to pop_frame,
4311 so that pop_frame can determine the region with respect to the
4312 *caller* frame. */
757bf1df 4313 tree lhs = gimple_call_lhs (call_stmt);
4cebae09 4314 pop_frame (lhs, NULL, ctxt);
757bf1df
DM
4315}
4316
aef703cf
AS
4317/* Extract calling information from the superedge and update the model for the
4318 call */
4319
4320void
4321region_model::update_for_call_superedge (const call_superedge &call_edge,
4322 region_model_context *ctxt)
4323{
4324 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 4325 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
4326}
4327
4328/* Extract calling information from the return superedge and update the model
4329 for the returning call */
4330
4331void
4332region_model::update_for_return_superedge (const return_superedge &return_edge,
4333 region_model_context *ctxt)
4334{
4335 const gcall *call_stmt = return_edge.get_call_stmt ();
4336 update_for_return_gcall (call_stmt, ctxt);
4337}
4338
bfca9505
DM
4339/* Attempt to to use R to replay SUMMARY into this object.
4340 Return true if it is possible. */
757bf1df 4341
bfca9505
DM
4342bool
4343region_model::replay_call_summary (call_summary_replay &r,
4344 const region_model &summary)
757bf1df 4345{
bfca9505
DM
4346 gcc_assert (summary.get_stack_depth () == 1);
4347
4348 m_store.replay_call_summary (r, summary.m_store);
757bf1df 4349
bfca9505
DM
4350 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4351 return false;
4352
4353 for (auto kv : summary.m_dynamic_extents)
4354 {
4355 const region *summary_reg = kv.first;
4356 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4357 if (!caller_reg)
4358 continue;
4359 const svalue *summary_sval = kv.second;
4360 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4361 if (!caller_sval)
4362 continue;
4363 m_dynamic_extents.put (caller_reg, caller_sval);
4364 }
4365
4366 return true;
757bf1df
DM
4367}
4368
4369/* Given a true or false edge guarded by conditional statement COND_STMT,
4370 determine appropriate constraints for the edge to be taken.
4371
4372 If they are feasible, add the constraints and return true.
4373
4374 Return false if the constraints contradict existing knowledge
84fb3546
DM
4375 (and so the edge should not be taken).
4376 When returning false, if OUT is non-NULL, write a new rejected_constraint
4377 to it. */
757bf1df
DM
4378
4379bool
4380region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4381 const gcond *cond_stmt,
84fb3546
DM
4382 region_model_context *ctxt,
4383 rejected_constraint **out)
757bf1df
DM
4384{
4385 ::edge cfg_edge = sedge.get_cfg_edge ();
4386 gcc_assert (cfg_edge != NULL);
4387 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4388
4389 enum tree_code op = gimple_cond_code (cond_stmt);
4390 tree lhs = gimple_cond_lhs (cond_stmt);
4391 tree rhs = gimple_cond_rhs (cond_stmt);
4392 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4393 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 4394 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
4395}
4396
ccd4df81
DM
4397/* Return true iff SWITCH_STMT has a non-default label that contains
4398 INT_CST. */
4399
4400static bool
4401has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
4402{
4403 /* We expect the initial label to be the default; skip it. */
4404 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
4405 unsigned min_idx = 1;
4406 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
4407
4408 /* Binary search: try to find the label containing INT_CST.
4409 This requires the cases to be sorted by CASE_LOW (done by the
4410 gimplifier). */
4411 while (max_idx >= min_idx)
4412 {
4413 unsigned case_idx = (min_idx + max_idx) / 2;
4414 tree label = gimple_switch_label (switch_stmt, case_idx);
4415 tree low = CASE_LOW (label);
4416 gcc_assert (low);
4417 tree high = CASE_HIGH (label);
4418 if (!high)
4419 high = low;
4420 if (tree_int_cst_compare (int_cst, low) < 0)
4421 {
4422 /* INT_CST is below the range of this label. */
4423 gcc_assert (case_idx > 0);
4424 max_idx = case_idx - 1;
4425 }
4426 else if (tree_int_cst_compare (int_cst, high) > 0)
4427 {
4428 /* INT_CST is above the range of this case. */
4429 min_idx = case_idx + 1;
4430 }
4431 else
4432 /* This case contains INT_CST. */
4433 return true;
4434 }
4435 /* Not found. */
4436 return false;
4437}
4438
4439/* Return true iff SWITCH_STMT (which must be on an enum value)
4440 has nondefault cases handling all values in the enum. */
4441
4442static bool
4443has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4444{
4445 gcc_assert (switch_stmt);
4446 tree type = TREE_TYPE (gimple_switch_index (switch_stmt));
4447 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
4448
4449 for (tree enum_val_iter = TYPE_VALUES (type);
4450 enum_val_iter;
4451 enum_val_iter = TREE_CHAIN (enum_val_iter))
4452 {
4453 tree enum_val = TREE_VALUE (enum_val_iter);
4454 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
4455 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
4456 if (!has_nondefault_case_for_value_p (switch_stmt,
4457 DECL_INITIAL (enum_val)))
4458 return false;
4459 }
4460 return true;
4461}
4462
757bf1df
DM
4463/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4464 for the edge to be taken.
4465
4466 If they are feasible, add the constraints and return true.
4467
4468 Return false if the constraints contradict existing knowledge
84fb3546
DM
4469 (and so the edge should not be taken).
4470 When returning false, if OUT is non-NULL, write a new rejected_constraint
4471 to it. */
757bf1df
DM
4472
4473bool
4474region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4475 const gswitch *switch_stmt,
84fb3546
DM
4476 region_model_context *ctxt,
4477 rejected_constraint **out)
757bf1df 4478{
ccd4df81
DM
4479 tree index = gimple_switch_index (switch_stmt);
4480 const svalue *index_sval = get_rvalue (index, ctxt);
4481
4482 /* If we're switching based on an enum type, assume that the user is only
4483 working with values from the enum. Hence if this is an
4484 implicitly-created "default", assume it doesn't get followed.
4485 This fixes numerous "uninitialized" false positives where we otherwise
4486 consider jumping past the initialization cases. */
4487
4488 if (/* Don't check during feasibility-checking (when ctxt is NULL). */
4489 ctxt
4490 /* Must be an enum value. */
4491 && index_sval->get_type ()
4492 && TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE
4493 && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
4494 /* If we have a constant, then we can check it directly. */
4495 && index_sval->get_kind () != SK_CONSTANT
4496 && edge.implicitly_created_default_p ()
4497 && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
4498 /* Don't do this if there's a chance that the index is
4499 attacker-controlled. */
4500 && !ctxt->possibly_tainted_p (index_sval))
4501 {
4502 if (out)
4503 *out = new rejected_default_case (*this);
4504 return false;
4505 }
4506
8ca7fa84
DM
4507 bounded_ranges_manager *ranges_mgr = get_range_manager ();
4508 const bounded_ranges *all_cases_ranges
4509 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
8ca7fa84
DM
4510 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
4511 if (!sat && out)
4512 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
2c044ff1
DM
4513 if (sat && ctxt && !all_cases_ranges->empty_p ())
4514 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
8ca7fa84 4515 return sat;
757bf1df
DM
4516}
4517
1690a839
DM
4518/* Apply any constraints due to an exception being thrown at LAST_STMT.
4519
4520 If they are feasible, add the constraints and return true.
4521
4522 Return false if the constraints contradict existing knowledge
84fb3546
DM
4523 (and so the edge should not be taken).
4524 When returning false, if OUT is non-NULL, write a new rejected_constraint
4525 to it. */
1690a839
DM
4526
4527bool
4528region_model::apply_constraints_for_exception (const gimple *last_stmt,
84fb3546
DM
4529 region_model_context *ctxt,
4530 rejected_constraint **out)
1690a839
DM
4531{
4532 gcc_assert (last_stmt);
4533 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
4534 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
4535 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
4536 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
4537 {
4538 /* We have an exception thrown from operator new.
4539 Add a constraint that the result was NULL, to avoid a false
4540 leak report due to the result being lost when following
4541 the EH edge. */
4542 if (tree lhs = gimple_call_lhs (call))
84fb3546 4543 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
4544 return true;
4545 }
4546 return true;
4547}
4548
808f4dfe
DM
4549/* For use with push_frame when handling a top-level call within the analysis.
4550 PARAM has a defined but unknown initial value.
4551 Anything it points to has escaped, since the calling context "knows"
4552 the pointer, and thus calls to unknown functions could read/write into
dcfc7ac9
DM
4553 the region.
4554 If NONNULL is true, then assume that PARAM must be non-NULL. */
757bf1df
DM
4555
4556void
808f4dfe 4557region_model::on_top_level_param (tree param,
dcfc7ac9
DM
4558 bool nonnull,
4559 region_model_context *ctxt)
757bf1df 4560{
808f4dfe 4561 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 4562 {
808f4dfe
DM
4563 const region *param_reg = get_lvalue (param, ctxt);
4564 const svalue *init_ptr_sval
4565 = m_mgr->get_or_create_initial_value (param_reg);
4566 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
4567 m_store.mark_as_escaped (pointee_reg);
dcfc7ac9
DM
4568 if (nonnull)
4569 {
4570 const svalue *null_ptr_sval
4571 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
4572 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
4573 }
5eae0ac7 4574 }
757bf1df
DM
4575}
4576
808f4dfe
DM
4577/* Update this region_model to reflect pushing a frame onto the stack
4578 for a call to FUN.
757bf1df 4579
808f4dfe
DM
4580 If ARG_SVALS is non-NULL, use it to populate the parameters
4581 in the new frame.
4582 Otherwise, the params have their initial_svalues.
757bf1df 4583
808f4dfe 4584 Return the frame_region for the new frame. */
757bf1df 4585
808f4dfe
DM
4586const region *
4587region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
4588 region_model_context *ctxt)
757bf1df 4589{
808f4dfe
DM
4590 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
4591 if (arg_svals)
757bf1df 4592 {
808f4dfe
DM
4593 /* Arguments supplied from a caller frame. */
4594 tree fndecl = fun->decl;
4595 unsigned idx = 0;
4596 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4597 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 4598 {
808f4dfe
DM
4599 /* If there's a mismatching declaration, the call stmt might
4600 not have enough args. Handle this case by leaving the
4601 rest of the params as uninitialized. */
4602 if (idx >= arg_svals->length ())
4603 break;
294b6da2
DM
4604 tree parm_lval = iter_parm;
4605 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
4606 parm_lval = parm_default_ssa;
4607 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 4608 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 4609 set_value (parm_reg, arg_sval, ctxt);
757bf1df 4610 }
2402dc6b
DM
4611
4612 /* Handle any variadic args. */
4613 unsigned va_arg_idx = 0;
4614 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
4615 {
4616 const svalue *arg_sval = (*arg_svals)[idx];
4617 const region *var_arg_reg
4618 = m_mgr->get_var_arg_region (m_current_frame,
4619 va_arg_idx);
4620 set_value (var_arg_reg, arg_sval, ctxt);
4621 }
757bf1df 4622 }
808f4dfe 4623 else
757bf1df 4624 {
808f4dfe
DM
4625 /* Otherwise we have a top-level call within the analysis. The params
4626 have defined but unknown initial values.
4627 Anything they point to has escaped. */
4628 tree fndecl = fun->decl;
dcfc7ac9
DM
4629
4630 /* Handle "__attribute__((nonnull))". */
4631 tree fntype = TREE_TYPE (fndecl);
4632 bitmap nonnull_args = get_nonnull_args (fntype);
4633
4634 unsigned parm_idx = 0;
808f4dfe
DM
4635 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
4636 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 4637 {
dcfc7ac9
DM
4638 bool non_null = (nonnull_args
4639 ? (bitmap_empty_p (nonnull_args)
4640 || bitmap_bit_p (nonnull_args, parm_idx))
4641 : false);
294b6da2 4642 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
dcfc7ac9 4643 on_top_level_param (parm_default_ssa, non_null, ctxt);
294b6da2 4644 else
dcfc7ac9
DM
4645 on_top_level_param (iter_parm, non_null, ctxt);
4646 parm_idx++;
757bf1df 4647 }
dcfc7ac9
DM
4648
4649 BITMAP_FREE (nonnull_args);
757bf1df 4650 }
757bf1df 4651
808f4dfe 4652 return m_current_frame;
757bf1df
DM
4653}
4654
808f4dfe
DM
4655/* Get the function of the top-most frame in this region_model's stack.
4656 There must be such a frame. */
757bf1df 4657
808f4dfe
DM
4658function *
4659region_model::get_current_function () const
757bf1df 4660{
808f4dfe
DM
4661 const frame_region *frame = get_current_frame ();
4662 gcc_assert (frame);
4663 return frame->get_function ();
757bf1df
DM
4664}
4665
808f4dfe 4666/* Pop the topmost frame_region from this region_model's stack;
757bf1df 4667
4cebae09
DM
4668 If RESULT_LVALUE is non-null, copy any return value from the frame
4669 into the corresponding region (evaluated with respect to the *caller*
4670 frame, rather than the called frame).
808f4dfe
DM
4671 If OUT_RESULT is non-null, copy any return value from the frame
4672 into *OUT_RESULT.
757bf1df 4673
808f4dfe
DM
4674 Purge the frame region and all its descendent regions.
4675 Convert any pointers that point into such regions into
4676 POISON_KIND_POPPED_STACK svalues. */
757bf1df 4677
808f4dfe 4678void
4cebae09 4679region_model::pop_frame (tree result_lvalue,
808f4dfe
DM
4680 const svalue **out_result,
4681 region_model_context *ctxt)
4682{
4683 gcc_assert (m_current_frame);
757bf1df 4684
808f4dfe 4685 const frame_region *frame_reg = m_current_frame;
5c6546ca
DM
4686
4687 /* Notify state machines. */
4688 if (ctxt)
4689 ctxt->on_pop_frame (frame_reg);
4690
4691 /* Evaluate the result, within the callee frame. */
808f4dfe
DM
4692 tree fndecl = m_current_frame->get_function ()->decl;
4693 tree result = DECL_RESULT (fndecl);
4cebae09 4694 const svalue *retval = NULL;
808f4dfe
DM
4695 if (result && TREE_TYPE (result) != void_type_node)
4696 {
4cebae09 4697 retval = get_rvalue (result, ctxt);
808f4dfe 4698 if (out_result)
13ad6d9f 4699 *out_result = retval;
808f4dfe 4700 }
757bf1df 4701
808f4dfe
DM
4702 /* Pop the frame. */
4703 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 4704
4cebae09
DM
4705 if (result_lvalue && retval)
4706 {
4707 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
4708 the frame, but before poisoning pointers into the old frame. */
4709 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
4710 set_value (result_dst_reg, retval, ctxt);
4711 }
4712
808f4dfe 4713 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
4714}
4715
808f4dfe 4716/* Get the number of frames in this region_model's stack. */
757bf1df 4717
808f4dfe
DM
4718int
4719region_model::get_stack_depth () const
757bf1df 4720{
808f4dfe
DM
4721 const frame_region *frame = get_current_frame ();
4722 if (frame)
4723 return frame->get_stack_depth ();
4724 else
4725 return 0;
757bf1df
DM
4726}
4727
808f4dfe
DM
4728/* Get the frame_region with the given index within the stack.
4729 The frame_region must exist. */
757bf1df 4730
808f4dfe
DM
4731const frame_region *
4732region_model::get_frame_at_index (int index) const
757bf1df 4733{
808f4dfe
DM
4734 const frame_region *frame = get_current_frame ();
4735 gcc_assert (frame);
4736 gcc_assert (index >= 0);
4737 gcc_assert (index <= frame->get_index ());
4738 while (index != frame->get_index ())
4739 {
4740 frame = frame->get_calling_frame ();
4741 gcc_assert (frame);
4742 }
4743 return frame;
757bf1df
DM
4744}
4745
808f4dfe
DM
4746/* Unbind svalues for any regions in REG and below.
4747 Find any pointers to such regions; convert them to
9a2c9579
DM
4748 poisoned values of kind PKIND.
4749 Also purge any dynamic extents. */
757bf1df 4750
808f4dfe
DM
4751void
4752region_model::unbind_region_and_descendents (const region *reg,
4753 enum poison_kind pkind)
757bf1df 4754{
808f4dfe
DM
4755 /* Gather a set of base regions to be unbound. */
4756 hash_set<const region *> base_regs;
4757 for (store::cluster_map_t::iterator iter = m_store.begin ();
4758 iter != m_store.end (); ++iter)
757bf1df 4759 {
808f4dfe
DM
4760 const region *iter_base_reg = (*iter).first;
4761 if (iter_base_reg->descendent_of_p (reg))
4762 base_regs.add (iter_base_reg);
757bf1df 4763 }
808f4dfe
DM
4764 for (hash_set<const region *>::iterator iter = base_regs.begin ();
4765 iter != base_regs.end (); ++iter)
4766 m_store.purge_cluster (*iter);
757bf1df 4767
808f4dfe
DM
4768 /* Find any pointers to REG or its descendents; convert to poisoned. */
4769 poison_any_pointers_to_descendents (reg, pkind);
9a2c9579
DM
4770
4771 /* Purge dynamic extents of any base regions in REG and below
4772 (e.g. VLAs and alloca stack regions). */
4773 for (auto iter : m_dynamic_extents)
4774 {
4775 const region *iter_reg = iter.first;
4776 if (iter_reg->descendent_of_p (reg))
4777 unset_dynamic_extents (iter_reg);
4778 }
757bf1df
DM
4779}
4780
808f4dfe
DM
4781/* Implementation of BindingVisitor.
4782 Update the bound svalues for regions below REG to use poisoned
4783 values instead. */
757bf1df 4784
808f4dfe 4785struct bad_pointer_finder
757bf1df 4786{
808f4dfe
DM
4787 bad_pointer_finder (const region *reg, enum poison_kind pkind,
4788 region_model_manager *mgr)
4789 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
4790 {}
757bf1df 4791
808f4dfe
DM
4792 void on_binding (const binding_key *, const svalue *&sval)
4793 {
4794 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4795 {
4796 const region *ptr_dst = ptr_sval->get_pointee ();
4797 /* Poison ptrs to descendents of REG, but not to REG itself,
4798 otherwise double-free detection doesn't work (since sm-state
4799 for "free" is stored on the original ptr svalue). */
4800 if (ptr_dst->descendent_of_p (m_reg)
4801 && ptr_dst != m_reg)
4802 {
4803 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
4804 sval->get_type ());
4805 ++m_count;
4806 }
4807 }
4808 }
757bf1df 4809
808f4dfe
DM
4810 const region *m_reg;
4811 enum poison_kind m_pkind;
4812 region_model_manager *const m_mgr;
4813 int m_count;
4814};
757bf1df 4815
808f4dfe
DM
4816/* Find any pointers to REG or its descendents; convert them to
4817 poisoned values of kind PKIND.
4818 Return the number of pointers that were poisoned. */
757bf1df 4819
808f4dfe
DM
4820int
4821region_model::poison_any_pointers_to_descendents (const region *reg,
4822 enum poison_kind pkind)
4823{
4824 bad_pointer_finder bv (reg, pkind, m_mgr);
4825 m_store.for_each_binding (bv);
4826 return bv.m_count;
757bf1df
DM
4827}
4828
808f4dfe
DM
4829/* Attempt to merge THIS with OTHER_MODEL, writing the result
4830 to OUT_MODEL. Use POINT to distinguish values created as a
4831 result of merging. */
757bf1df 4832
808f4dfe
DM
4833bool
4834region_model::can_merge_with_p (const region_model &other_model,
4835 const program_point &point,
f573d351
DM
4836 region_model *out_model,
4837 const extrinsic_state *ext_state,
4838 const program_state *state_a,
4839 const program_state *state_b) const
757bf1df 4840{
808f4dfe
DM
4841 gcc_assert (out_model);
4842 gcc_assert (m_mgr == other_model.m_mgr);
4843 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 4844
808f4dfe
DM
4845 if (m_current_frame != other_model.m_current_frame)
4846 return false;
4847 out_model->m_current_frame = m_current_frame;
757bf1df 4848
f573d351
DM
4849 model_merger m (this, &other_model, point, out_model,
4850 ext_state, state_a, state_b);
757bf1df 4851
808f4dfe
DM
4852 if (!store::can_merge_p (&m_store, &other_model.m_store,
4853 &out_model->m_store, m_mgr->get_store_manager (),
4854 &m))
4855 return false;
4856
9a2c9579
DM
4857 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
4858 &out_model->m_dynamic_extents))
4859 return false;
4860
808f4dfe
DM
4861 /* Merge constraints. */
4862 constraint_manager::merge (*m_constraints,
4863 *other_model.m_constraints,
c710051a 4864 out_model->m_constraints);
757bf1df 4865
808f4dfe 4866 return true;
757bf1df
DM
4867}
4868
4869/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
4870 otherwise. */
4871
4872tree
4873region_model::get_fndecl_for_call (const gcall *call,
4874 region_model_context *ctxt)
4875{
4876 tree fn_ptr = gimple_call_fn (call);
4877 if (fn_ptr == NULL_TREE)
4878 return NULL_TREE;
808f4dfe
DM
4879 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
4880 if (const region_svalue *fn_ptr_ptr
4881 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 4882 {
808f4dfe
DM
4883 const region *reg = fn_ptr_ptr->get_pointee ();
4884 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 4885 {
808f4dfe 4886 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
4887 cgraph_node *node = cgraph_node::get (fn_decl);
4888 if (!node)
4889 return NULL_TREE;
4890 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
4891 if (ultimate_node)
4892 return ultimate_node->decl;
757bf1df
DM
4893 }
4894 }
4895
4896 return NULL_TREE;
4897}
4898
808f4dfe 4899/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 4900
faacafd2 4901struct append_regions_cb_data
757bf1df 4902{
808f4dfe
DM
4903 const region_model *model;
4904 auto_vec<const decl_region *> *out;
4905};
757bf1df 4906
faacafd2 4907/* Populate *OUT with all decl_regions in the current
808f4dfe 4908 frame that have clusters within the store. */
757bf1df
DM
4909
4910void
808f4dfe 4911region_model::
faacafd2 4912get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
757bf1df 4913{
faacafd2 4914 append_regions_cb_data data;
808f4dfe
DM
4915 data.model = this;
4916 data.out = out;
faacafd2 4917 m_store.for_each_cluster (append_regions_cb, &data);
757bf1df
DM
4918}
4919
faacafd2 4920/* Implementation detail of get_regions_for_current_frame. */
757bf1df 4921
808f4dfe 4922void
faacafd2
DM
4923region_model::append_regions_cb (const region *base_reg,
4924 append_regions_cb_data *cb_data)
757bf1df 4925{
808f4dfe
DM
4926 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
4927 return;
4928 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
faacafd2 4929 cb_data->out->safe_push (decl_reg);
757bf1df
DM
4930}
4931
c83e9731
TL
4932
4933/* Abstract class for diagnostics related to the use of
4934 floating-point arithmetic where precision is needed. */
4935
4936class imprecise_floating_point_arithmetic : public pending_diagnostic
4937{
4938public:
4939 int get_controlling_option () const final override
4940 {
4941 return OPT_Wanalyzer_imprecise_fp_arithmetic;
4942 }
4943};
4944
4945/* Concrete diagnostic to complain about uses of floating-point arithmetic
4946 in the size argument of malloc etc. */
4947
4948class float_as_size_arg : public imprecise_floating_point_arithmetic
4949{
4950public:
4951 float_as_size_arg (tree arg) : m_arg (arg)
4952 {}
4953
4954 const char *get_kind () const final override
4955 {
4956 return "float_as_size_arg_diagnostic";
4957 }
4958
ac9230fb 4959 bool subclass_equal_p (const pending_diagnostic &other) const final override
c83e9731
TL
4960 {
4961 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
4962 }
4963
4964 bool emit (rich_location *rich_loc) final override
4965 {
4966 diagnostic_metadata m;
4967 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
4968 "use of floating-point arithmetic here might"
4969 " yield unexpected results");
4970 if (warned)
4971 inform (rich_loc->get_loc (), "only use operands of an integer type"
4972 " inside the size argument");
4973 return warned;
4974 }
4975
4976 label_text describe_final_event (const evdesc::final_event &ev) final
4977 override
4978 {
4979 if (m_arg)
4980 return ev.formatted_print ("operand %qE is of type %qT",
4981 m_arg, TREE_TYPE (m_arg));
4982 return ev.formatted_print ("at least one operand of the size argument is"
4983 " of a floating-point type");
4984 }
4985
4986private:
4987 tree m_arg;
4988};
4989
4990/* Visitor to find uses of floating-point variables/constants in an svalue. */
4991
4992class contains_floating_point_visitor : public visitor
4993{
4994public:
4995 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
4996 {
4997 root_sval->accept (this);
4998 }
4999
5000 const svalue *get_svalue_to_report ()
5001 {
5002 return m_result;
5003 }
5004
5005 void visit_constant_svalue (const constant_svalue *sval) final override
5006 {
5007 /* At the point the analyzer runs, constant integer operands in a floating
5008 point expression are already implictly converted to floating-points.
5009 Thus, we do prefer to report non-constants such that the diagnostic
5010 always reports a floating-point operand. */
5011 tree type = sval->get_type ();
5012 if (type && FLOAT_TYPE_P (type) && !m_result)
5013 m_result = sval;
5014 }
5015
5016 void visit_conjured_svalue (const conjured_svalue *sval) final override
5017 {
5018 tree type = sval->get_type ();
5019 if (type && FLOAT_TYPE_P (type))
5020 m_result = sval;
5021 }
5022
5023 void visit_initial_svalue (const initial_svalue *sval) final override
5024 {
5025 tree type = sval->get_type ();
5026 if (type && FLOAT_TYPE_P (type))
5027 m_result = sval;
5028 }
5029
5030private:
5031 /* Non-null if at least one floating-point operand was found. */
5032 const svalue *m_result;
5033};
5034
5035/* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5036
5037void
5038region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5039 region_model_context *ctxt) const
5040{
5041 gcc_assert (ctxt);
5042
5043 contains_floating_point_visitor v (size_in_bytes);
5044 if (const svalue *float_sval = v.get_svalue_to_report ())
5045 {
5046 tree diag_arg = get_representative_tree (float_sval);
6341f14e 5047 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
c83e9731
TL
5048 }
5049}
5050
ce917b04
DM
5051/* Return a region describing a heap-allocated block of memory.
5052 Use CTXT to complain about tainted sizes.
5053
5054 Reuse an existing heap_allocated_region if it's not being referenced by
5055 this region_model; otherwise create a new one. */
757bf1df 5056
808f4dfe 5057const region *
ce917b04
DM
5058region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
5059 region_model_context *ctxt)
5060{
5061 /* Determine which regions are referenced in this region_model, so that
5062 we can reuse an existing heap_allocated_region if it's not in use on
5063 this path. */
7dc0ecaf 5064 auto_bitmap base_regs_in_use;
ce917b04 5065 get_referenced_base_regions (base_regs_in_use);
b03a10b0
DM
5066
5067 /* Don't reuse regions that are marked as TOUCHED. */
5068 for (store::cluster_map_t::iterator iter = m_store.begin ();
5069 iter != m_store.end (); ++iter)
5070 if ((*iter).second->touched_p ())
5071 {
5072 const region *base_reg = (*iter).first;
5073 bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
5074 }
5075
ce917b04
DM
5076 const region *reg
5077 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
688fc162
DM
5078 if (size_in_bytes)
5079 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5080 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 5081 return reg;
757bf1df
DM
5082}
5083
ce917b04
DM
5084/* Populate OUT_IDS with the set of IDs of those base regions which are
5085 reachable in this region_model. */
5086
5087void
7dc0ecaf 5088region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
ce917b04
DM
5089{
5090 reachable_regions reachable_regs (const_cast<region_model *> (this));
5091 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
5092 &reachable_regs);
5093 /* Get regions for locals that have explicitly bound values. */
5094 for (store::cluster_map_t::iterator iter = m_store.begin ();
5095 iter != m_store.end (); ++iter)
5096 {
5097 const region *base_reg = (*iter).first;
5098 if (const region *parent = base_reg->get_parent_region ())
5099 if (parent->get_kind () == RK_FRAME)
5100 reachable_regs.add (base_reg, false);
5101 }
5102
5103 bitmap_clear (out_ids);
5104 for (auto iter_reg : reachable_regs)
5105 bitmap_set_bit (out_ids, iter_reg->get_id ());
5106}
5107
808f4dfe 5108/* Return a new region describing a block of memory allocated within the
b9365b93
DM
5109 current frame.
5110 Use CTXT to complain about tainted sizes. */
757bf1df 5111
808f4dfe 5112const region *
b9365b93
DM
5113region_model::create_region_for_alloca (const svalue *size_in_bytes,
5114 region_model_context *ctxt)
757bf1df 5115{
808f4dfe 5116 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
ea4e3218 5117 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 5118 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 5119 return reg;
757bf1df
DM
5120}
5121
b9365b93
DM
5122/* Record that the size of REG is SIZE_IN_BYTES.
5123 Use CTXT to complain about tainted sizes. */
757bf1df
DM
5124
5125void
9a2c9579 5126region_model::set_dynamic_extents (const region *reg,
b9365b93
DM
5127 const svalue *size_in_bytes,
5128 region_model_context *ctxt)
9a2c9579
DM
5129{
5130 assert_compat_types (size_in_bytes->get_type (), size_type_node);
b9365b93 5131 if (ctxt)
c83e9731
TL
5132 {
5133 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5134 ctxt);
5135 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5136 }
9a2c9579
DM
5137 m_dynamic_extents.put (reg, size_in_bytes);
5138}
5139
5140/* Get the recording of REG in bytes, or NULL if no dynamic size was
5141 recorded. */
5142
5143const svalue *
5144region_model::get_dynamic_extents (const region *reg) const
757bf1df 5145{
9a2c9579
DM
5146 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5147 return *slot;
5148 return NULL;
5149}
5150
5151/* Unset any recorded dynamic size of REG. */
5152
5153void
5154region_model::unset_dynamic_extents (const region *reg)
5155{
5156 m_dynamic_extents.remove (reg);
757bf1df
DM
5157}
5158
c81b60b8
DM
5159/* Information of the layout of a RECORD_TYPE, capturing it as a vector
5160 of items, where each item is either a field or padding. */
5161
5162class record_layout
5163{
5164public:
5165 /* An item within a record; either a field, or padding after a field. */
5166 struct item
5167 {
5168 public:
5169 item (const bit_range &br,
5170 tree field,
5171 bool is_padding)
5172 : m_bit_range (br),
5173 m_field (field),
5174 m_is_padding (is_padding)
5175 {
5176 }
5177
5178 bit_offset_t get_start_bit_offset () const
5179 {
5180 return m_bit_range.get_start_bit_offset ();
5181 }
5182 bit_offset_t get_next_bit_offset () const
5183 {
5184 return m_bit_range.get_next_bit_offset ();
5185 }
5186
5187 bool contains_p (bit_offset_t offset) const
5188 {
5189 return m_bit_range.contains_p (offset);
5190 }
5191
5192 void dump_to_pp (pretty_printer *pp) const
5193 {
5194 if (m_is_padding)
5195 pp_printf (pp, "padding after %qD", m_field);
5196 else
5197 pp_printf (pp, "%qD", m_field);
5198 pp_string (pp, ", ");
5199 m_bit_range.dump_to_pp (pp);
5200 }
5201
5202 bit_range m_bit_range;
5203 tree m_field;
5204 bool m_is_padding;
5205 };
5206
5207 record_layout (tree record_type)
c81b60b8
DM
5208 {
5209 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5210
5211 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5212 iter = DECL_CHAIN (iter))
5213 {
5214 if (TREE_CODE (iter) == FIELD_DECL)
5215 {
5216 int iter_field_offset = int_bit_position (iter);
5217 bit_size_t size_in_bits;
5218 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5219 size_in_bits = 0;
5220
5221 maybe_pad_to (iter_field_offset);
5222
5223 /* Add field. */
5224 m_items.safe_push (item (bit_range (iter_field_offset,
5225 size_in_bits),
5226 iter, false));
5227 }
5228 }
5229
5230 /* Add any trailing padding. */
5231 bit_size_t size_in_bits;
5232 if (int_size_in_bits (record_type, &size_in_bits))
5233 maybe_pad_to (size_in_bits);
5234 }
5235
5236 void dump_to_pp (pretty_printer *pp) const
5237 {
5238 unsigned i;
5239 item *it;
5240 FOR_EACH_VEC_ELT (m_items, i, it)
5241 {
5242 it->dump_to_pp (pp);
5243 pp_newline (pp);
5244 }
5245 }
5246
5247 DEBUG_FUNCTION void dump () const
5248 {
5249 pretty_printer pp;
5250 pp_format_decoder (&pp) = default_tree_printer;
5251 pp.buffer->stream = stderr;
5252 dump_to_pp (&pp);
5253 pp_flush (&pp);
5254 }
5255
5256 const record_layout::item *get_item_at (bit_offset_t offset) const
5257 {
5258 unsigned i;
5259 item *it;
5260 FOR_EACH_VEC_ELT (m_items, i, it)
5261 if (it->contains_p (offset))
5262 return it;
5263 return NULL;
5264 }
5265
5266private:
5267 /* Subroutine of ctor. Add padding item to NEXT_OFFSET if necessary. */
5268
5269 void maybe_pad_to (bit_offset_t next_offset)
5270 {
5271 if (m_items.length () > 0)
5272 {
5273 const item &last_item = m_items[m_items.length () - 1];
5274 bit_offset_t offset_after_last_item
5275 = last_item.get_next_bit_offset ();
5276 if (next_offset > offset_after_last_item)
5277 {
5278 bit_size_t padding_size
5279 = next_offset - offset_after_last_item;
5280 m_items.safe_push (item (bit_range (offset_after_last_item,
5281 padding_size),
5282 last_item.m_field, true));
5283 }
5284 }
5285 }
5286
c81b60b8
DM
5287 auto_vec<item> m_items;
5288};
5289
5290/* A subclass of pending_diagnostic for complaining about uninitialized data
5291 being copied across a trust boundary to an untrusted output
5292 (e.g. copy_to_user infoleaks in the Linux kernel). */
5293
5294class exposure_through_uninit_copy
5295 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5296{
5297public:
5298 exposure_through_uninit_copy (const region *src_region,
5299 const region *dest_region,
ffaeb9dc 5300 const svalue *copied_sval)
c81b60b8
DM
5301 : m_src_region (src_region),
5302 m_dest_region (dest_region),
ffaeb9dc 5303 m_copied_sval (copied_sval)
c81b60b8
DM
5304 {
5305 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5306 || m_copied_sval->get_kind () == SK_COMPOUND);
5307 }
5308
5309 const char *get_kind () const final override
5310 {
5311 return "exposure_through_uninit_copy";
5312 }
5313
5314 bool operator== (const exposure_through_uninit_copy &other) const
5315 {
5316 return (m_src_region == other.m_src_region
5317 && m_dest_region == other.m_dest_region
5318 && m_copied_sval == other.m_copied_sval);
5319 }
5320
5321 int get_controlling_option () const final override
5322 {
5323 return OPT_Wanalyzer_exposure_through_uninit_copy;
5324 }
5325
5326 bool emit (rich_location *rich_loc) final override
5327 {
5328 diagnostic_metadata m;
5329 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5330 m.add_cwe (200);
5331 enum memory_space mem_space = get_src_memory_space ();
5332 bool warned;
5333 switch (mem_space)
5334 {
5335 default:
5336 warned = warning_meta
5337 (rich_loc, m, get_controlling_option (),
5338 "potential exposure of sensitive information"
5339 " by copying uninitialized data across trust boundary");
5340 break;
5341 case MEMSPACE_STACK:
5342 warned = warning_meta
5343 (rich_loc, m, get_controlling_option (),
5344 "potential exposure of sensitive information"
5345 " by copying uninitialized data from stack across trust boundary");
5346 break;
5347 case MEMSPACE_HEAP:
5348 warned = warning_meta
5349 (rich_loc, m, get_controlling_option (),
5350 "potential exposure of sensitive information"
5351 " by copying uninitialized data from heap across trust boundary");
5352 break;
5353 }
5354 if (warned)
5355 {
5356 location_t loc = rich_loc->get_loc ();
5357 inform_number_of_uninit_bits (loc);
5358 complain_about_uninit_ranges (loc);
5359
5360 if (mem_space == MEMSPACE_STACK)
5361 maybe_emit_fixit_hint ();
5362 }
5363 return warned;
5364 }
5365
5366 label_text describe_final_event (const evdesc::final_event &) final override
5367 {
5368 enum memory_space mem_space = get_src_memory_space ();
5369 switch (mem_space)
5370 {
5371 default:
5372 return label_text::borrow ("uninitialized data copied here");
5373
5374 case MEMSPACE_STACK:
5375 return label_text::borrow ("uninitialized data copied from stack here");
5376
5377 case MEMSPACE_HEAP:
5378 return label_text::borrow ("uninitialized data copied from heap here");
5379 }
5380 }
5381
5382 void mark_interesting_stuff (interesting_t *interest) final override
5383 {
5384 if (m_src_region)
5385 interest->add_region_creation (m_src_region);
5386 }
5387
5388private:
5389 enum memory_space get_src_memory_space () const
5390 {
5391 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5392 }
5393
5394 bit_size_t calc_num_uninit_bits () const
5395 {
5396 switch (m_copied_sval->get_kind ())
5397 {
5398 default:
5399 gcc_unreachable ();
5400 break;
5401 case SK_POISONED:
5402 {
5403 const poisoned_svalue *poisoned_sval
5404 = as_a <const poisoned_svalue *> (m_copied_sval);
5405 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5406
5407 /* Give up if don't have type information. */
5408 if (m_copied_sval->get_type () == NULL_TREE)
5409 return 0;
5410
5411 bit_size_t size_in_bits;
5412 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5413 return size_in_bits;
5414
5415 /* Give up if we can't get the size of the type. */
5416 return 0;
5417 }
5418 break;
5419 case SK_COMPOUND:
5420 {
5421 const compound_svalue *compound_sval
5422 = as_a <const compound_svalue *> (m_copied_sval);
5423 bit_size_t result = 0;
5424 /* Find keys for uninit svals. */
5425 for (auto iter : *compound_sval)
5426 {
5427 const svalue *sval = iter.second;
5428 if (const poisoned_svalue *psval
5429 = sval->dyn_cast_poisoned_svalue ())
5430 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5431 {
5432 const binding_key *key = iter.first;
5433 const concrete_binding *ckey
5434 = key->dyn_cast_concrete_binding ();
5435 gcc_assert (ckey);
5436 result += ckey->get_size_in_bits ();
5437 }
5438 }
5439 return result;
5440 }
5441 }
5442 }
5443
5444 void inform_number_of_uninit_bits (location_t loc) const
5445 {
5446 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5447 if (num_uninit_bits <= 0)
5448 return;
5449 if (num_uninit_bits % BITS_PER_UNIT == 0)
5450 {
5451 /* Express in bytes. */
5452 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5453 if (num_uninit_bytes == 1)
5454 inform (loc, "1 byte is uninitialized");
5455 else
5456 inform (loc,
5457 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5458 }
5459 else
5460 {
5461 /* Express in bits. */
5462 if (num_uninit_bits == 1)
5463 inform (loc, "1 bit is uninitialized");
5464 else
5465 inform (loc,
5466 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5467 }
5468 }
5469
5470 void complain_about_uninit_ranges (location_t loc) const
5471 {
5472 if (const compound_svalue *compound_sval
5473 = m_copied_sval->dyn_cast_compound_svalue ())
5474 {
5475 /* Find keys for uninit svals. */
5476 auto_vec<const concrete_binding *> uninit_keys;
5477 for (auto iter : *compound_sval)
5478 {
5479 const svalue *sval = iter.second;
5480 if (const poisoned_svalue *psval
5481 = sval->dyn_cast_poisoned_svalue ())
5482 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5483 {
5484 const binding_key *key = iter.first;
5485 const concrete_binding *ckey
5486 = key->dyn_cast_concrete_binding ();
5487 gcc_assert (ckey);
5488 uninit_keys.safe_push (ckey);
5489 }
5490 }
5491 /* Complain about them in sorted order. */
5492 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
5493
5494 std::unique_ptr<record_layout> layout;
5495
5496 tree type = m_copied_sval->get_type ();
5497 if (type && TREE_CODE (type) == RECORD_TYPE)
5498 {
5499 // (std::make_unique is C++14)
5500 layout = std::unique_ptr<record_layout> (new record_layout (type));
5501
5502 if (0)
5503 layout->dump ();
5504 }
5505
5506 unsigned i;
5507 const concrete_binding *ckey;
5508 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
5509 {
5510 bit_offset_t start_bit = ckey->get_start_bit_offset ();
5511 bit_offset_t next_bit = ckey->get_next_bit_offset ();
5512 complain_about_uninit_range (loc, start_bit, next_bit,
5513 layout.get ());
5514 }
5515 }
5516 }
5517
5518 void complain_about_uninit_range (location_t loc,
5519 bit_offset_t start_bit,
5520 bit_offset_t next_bit,
5521 const record_layout *layout) const
5522 {
5523 if (layout)
5524 {
5525 while (start_bit < next_bit)
5526 {
5527 if (const record_layout::item *item
5528 = layout->get_item_at (start_bit))
5529 {
5530 gcc_assert (start_bit >= item->get_start_bit_offset ());
5531 gcc_assert (start_bit < item->get_next_bit_offset ());
5532 if (item->get_start_bit_offset () == start_bit
5533 && item->get_next_bit_offset () <= next_bit)
5534 complain_about_fully_uninit_item (*item);
5535 else
5536 complain_about_partially_uninit_item (*item);
5537 start_bit = item->get_next_bit_offset ();
5538 continue;
5539 }
5540 else
5541 break;
5542 }
5543 }
5544
5545 if (start_bit >= next_bit)
5546 return;
5547
5548 if (start_bit % 8 == 0 && next_bit % 8 == 0)
5549 {
5550 /* Express in bytes. */
5551 byte_offset_t start_byte = start_bit / 8;
5552 byte_offset_t last_byte = (next_bit / 8) - 1;
5553 if (last_byte == start_byte)
5554 inform (loc,
5555 "byte %wu is uninitialized",
5556 start_byte.to_uhwi ());
5557 else
5558 inform (loc,
5559 "bytes %wu - %wu are uninitialized",
5560 start_byte.to_uhwi (),
5561 last_byte.to_uhwi ());
5562 }
5563 else
5564 {
5565 /* Express in bits. */
5566 bit_offset_t last_bit = next_bit - 1;
5567 if (last_bit == start_bit)
5568 inform (loc,
5569 "bit %wu is uninitialized",
5570 start_bit.to_uhwi ());
5571 else
5572 inform (loc,
5573 "bits %wu - %wu are uninitialized",
5574 start_bit.to_uhwi (),
5575 last_bit.to_uhwi ());
5576 }
5577 }
5578
5579 static void
5580 complain_about_fully_uninit_item (const record_layout::item &item)
5581 {
5582 tree field = item.m_field;
5583 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
5584 if (item.m_is_padding)
5585 {
5586 if (num_bits % 8 == 0)
5587 {
5588 /* Express in bytes. */
5589 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5590 if (num_bytes == 1)
5591 inform (DECL_SOURCE_LOCATION (field),
5592 "padding after field %qD is uninitialized (1 byte)",
5593 field);
5594 else
5595 inform (DECL_SOURCE_LOCATION (field),
5596 "padding after field %qD is uninitialized (%wu bytes)",
5597 field, num_bytes.to_uhwi ());
5598 }
5599 else
5600 {
5601 /* Express in bits. */
5602 if (num_bits == 1)
5603 inform (DECL_SOURCE_LOCATION (field),
5604 "padding after field %qD is uninitialized (1 bit)",
5605 field);
5606 else
5607 inform (DECL_SOURCE_LOCATION (field),
5608 "padding after field %qD is uninitialized (%wu bits)",
5609 field, num_bits.to_uhwi ());
5610 }
5611 }
5612 else
5613 {
5614 if (num_bits % 8 == 0)
5615 {
5616 /* Express in bytes. */
5617 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
5618 if (num_bytes == 1)
5619 inform (DECL_SOURCE_LOCATION (field),
5620 "field %qD is uninitialized (1 byte)", field);
5621 else
5622 inform (DECL_SOURCE_LOCATION (field),
5623 "field %qD is uninitialized (%wu bytes)",
5624 field, num_bytes.to_uhwi ());
5625 }
5626 else
5627 {
5628 /* Express in bits. */
5629 if (num_bits == 1)
5630 inform (DECL_SOURCE_LOCATION (field),
5631 "field %qD is uninitialized (1 bit)", field);
5632 else
5633 inform (DECL_SOURCE_LOCATION (field),
5634 "field %qD is uninitialized (%wu bits)",
5635 field, num_bits.to_uhwi ());
5636 }
5637 }
5638 }
5639
5640 static void
5641 complain_about_partially_uninit_item (const record_layout::item &item)
5642 {
5643 tree field = item.m_field;
5644 if (item.m_is_padding)
5645 inform (DECL_SOURCE_LOCATION (field),
5646 "padding after field %qD is partially uninitialized",
5647 field);
5648 else
5649 inform (DECL_SOURCE_LOCATION (field),
5650 "field %qD is partially uninitialized",
5651 field);
5652 /* TODO: ideally we'd describe what parts are uninitialized. */
5653 }
5654
5655 void maybe_emit_fixit_hint () const
5656 {
5657 if (tree decl = m_src_region->maybe_get_decl ())
5658 {
5659 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
5660 hint_richloc.add_fixit_insert_after (" = {0}");
5661 inform (&hint_richloc,
5662 "suggest forcing zero-initialization by"
5663 " providing a %<{0}%> initializer");
5664 }
5665 }
5666
5667private:
5668 const region *m_src_region;
5669 const region *m_dest_region;
5670 const svalue *m_copied_sval;
c81b60b8
DM
5671};
5672
5673/* Return true if any part of SVAL is uninitialized. */
5674
5675static bool
5676contains_uninit_p (const svalue *sval)
5677{
5678 struct uninit_finder : public visitor
5679 {
5680 public:
5681 uninit_finder () : m_found_uninit (false) {}
5682 void visit_poisoned_svalue (const poisoned_svalue *sval)
5683 {
5684 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
5685 m_found_uninit = true;
5686 }
5687 bool m_found_uninit;
5688 };
5689
5690 uninit_finder v;
5691 sval->accept (&v);
5692
5693 return v.m_found_uninit;
5694}
5695
5696/* Function for use by plugins when simulating writing data through a
5697 pointer to an "untrusted" region DST_REG (and thus crossing a security
5698 boundary), such as copying data to user space in an OS kernel.
5699
5700 Check that COPIED_SVAL is fully initialized. If not, complain about
5701 an infoleak to CTXT.
5702
5703 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
5704 as to where COPIED_SVAL came from. */
5705
5706void
5707region_model::maybe_complain_about_infoleak (const region *dst_reg,
5708 const svalue *copied_sval,
5709 const region *src_reg,
5710 region_model_context *ctxt)
5711{
5712 /* Check for exposure. */
5713 if (contains_uninit_p (copied_sval))
6341f14e
DM
5714 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
5715 dst_reg,
5716 copied_sval));
c81b60b8
DM
5717}
5718
3d2d04cd
DM
/* Set errno to a positive symbolic int, as if some error has occurred.  */

void
region_model::set_errno (const call_details &cd)
{
  const region *errno_reg = m_mgr->get_errno_region ();
  conjured_purge p (this, cd.get_ctxt ());
  /* Conjure a new symbolic value for errno, associated with this
     call statement.  */
  const svalue *new_errno_sval
    = m_mgr->get_or_create_conjured_svalue (integer_type_node,
					    cd.get_call_stmt (),
					    errno_reg, p);
  const svalue *zero
    = m_mgr->get_or_create_int_cst (integer_type_node, 0);
  /* Constrain the conjured value to be strictly positive, matching
     "some error has occurred".  */
  add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
  set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
}
5735
eafa9d96
DM
/* class noop_region_model_context : public region_model_context.  */

void
noop_region_model_context::add_note (std::unique_ptr<pending_note>)
{
  /* No-op: this context discards any notes passed to it.  */
}
5742
void
noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
{
  /* No-op: this context ignores requests to bifurcate state.  */
}
5747
void
noop_region_model_context::terminate_path ()
{
  /* No-op: this context ignores requests to terminate the path.  */
}
5752
808f4dfe 5753/* struct model_merger. */
757bf1df 5754
808f4dfe 5755/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
5756
5757void
808f4dfe 5758model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 5759{
808f4dfe
DM
5760 pp_string (pp, "model A:");
5761 pp_newline (pp);
5762 m_model_a->dump_to_pp (pp, simple, true);
5763 pp_newline (pp);
757bf1df 5764
808f4dfe 5765 pp_string (pp, "model B:");
757bf1df 5766 pp_newline (pp);
808f4dfe 5767 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
5768 pp_newline (pp);
5769
808f4dfe 5770 pp_string (pp, "merged model:");
757bf1df 5771 pp_newline (pp);
808f4dfe 5772 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
5773 pp_newline (pp);
5774}
5775
/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  /* Follow the colorization setting of the global diagnostic context.  */
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}
5788
/* Dump a multiline representation of this merger to stderr.
   Convenience wrapper around the FILE * overload.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
5796
f573d351
DM
5797/* Return true if it's OK to merge SVAL with other svalues. */
5798
5799bool
5800model_merger::mergeable_svalue_p (const svalue *sval) const
5801{
5802 if (m_ext_state)
5803 {
5804 /* Reject merging svalues that have non-purgable sm-state,
5805 to avoid falsely reporting memory leaks by merging them
5806 with something else. For example, given a local var "p",
5807 reject the merger of a:
5808 store_a mapping "p" to a malloc-ed ptr
5809 with:
5810 store_b mapping "p" to a NULL ptr. */
5811 if (m_state_a)
5812 if (!m_state_a->can_purge_p (*m_ext_state, sval))
5813 return false;
5814 if (m_state_b)
5815 if (!m_state_b->can_purge_p (*m_ext_state, sval))
5816 return false;
5817 }
5818 return true;
5819}
5820
75038aa6
DM
5821} // namespace ana
5822
/* Dump RMODEL fully to stderr (i.e. without summarization).  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  /* "false" == do not summarize.  */
  rmodel.dump (false);
}
5830
/* class rejected_op_constraint : public rejected_constraint.  */

void
rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
{
  /* Use a copy of the model to convert the operand trees back into
     svalues for dumping.  */
  region_model m (m_model);
  const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
  const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
  /* Print "LHS OP RHS" using the symbolic form of each operand.  */
  lhs_sval->dump_to_pp (pp, true);
  pp_printf (pp, " %s ", op_symbol_code (m_op));
  rhs_sval->dump_to_pp (pp, true);
}
5843
ccd4df81
DM
/* class rejected_default_case : public rejected_constraint.  */

void
rejected_default_case::dump_to_pp (pretty_printer *pp) const
{
  /* There is no expression to print; emit a fixed description.  */
  pp_string (pp, "implicit default for enum");
}
5851
8ca7fa84
DM
/* class rejected_ranges_constraint : public rejected_constraint.  */

void
rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
{
  /* Use a copy of the model to convert M_EXPR back into an svalue.  */
  region_model m (m_model);
  const svalue *sval = m.get_rvalue (m_expr, NULL);
  sval->dump_to_pp (pp, true);
  pp_string (pp, " in ");
  m_ranges->dump_to_pp (pp, true);
}
5863
/* class engine.  */

/* engine's ctor.
   LOGGER is passed through to the region_model_manager.  */

engine::engine (const supergraph *sg, logger *logger)
: m_sg (sg), m_mgr (logger)
{
}
5872
/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  /* NOTE(review): the "true" flag selects the more detailed dump —
     see region_model_manager::log_stats for its exact meaning.  */
  m_mgr.log_stats (logger, true);
}
5880
75038aa6
DM
5881namespace ana {
5882
757bf1df
DM
5883#if CHECKING_P
5884
5885namespace selftest {
5886
8c08c983
DM
5887/* Build a constant tree of the given type from STR. */
5888
5889static tree
5890build_real_cst_from_string (tree type, const char *str)
5891{
5892 REAL_VALUE_TYPE real;
5893 real_from_string (&real, str);
5894 return build_real (type, real);
5895}
5896
5897/* Append various "interesting" constants to OUT (e.g. NaN). */
5898
5899static void
5900append_interesting_constants (auto_vec<tree> *out)
5901{
5902 out->safe_push (build_int_cst (integer_type_node, 0));
5903 out->safe_push (build_int_cst (integer_type_node, 42));
5904 out->safe_push (build_int_cst (unsigned_type_node, 0));
5905 out->safe_push (build_int_cst (unsigned_type_node, 42));
5906 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
5907 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
5908 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
5909 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
5910 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
5911 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
5912 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
5913 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
5914}
5915
/* Verify that tree_cmp is a well-behaved comparator for qsort, even
   if the underlying constants aren't comparable.  */

static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple.  There is no result to assert; we merely
     check that qsort terminates without violating its comparator
     preconditions (which would be caught by checked-qsort builds).  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
	{
	  auto_vec<tree> v (3);
	  v.quick_push (csts[i]);
	  v.quick_push (csts[j]);
	  v.quick_push (csts[k]);
	  v.qsort (tree_cmp);
	}
}
5938
757bf1df
DM
/* Implementation detail of the ASSERT_CONDITION_* macros.
   Evaluate "LHS OP RHS" within MODEL, asserting at LOC that the
   resulting tristate equals EXPECTED.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  const svalue *lhs, tree_code op, const svalue *rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}
5950
/* Implementation detail of the ASSERT_CONDITION_* macros.
   As above, but taking the operands as trees rather than svalues.  */

void
assert_condition (const location &loc,
		  region_model &model,
		  tree lhs, tree_code op, tree rhs,
		  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}
5962
90f7c300
DM
/* Implementation detail of ASSERT_DUMP_TREE_EQ.
   Dump T to a pretty_printer and assert at LOC that the resulting
   text equals EXPECTED.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  /* NOTE(review): auto_fix_quotes normalizes the quote characters used
     in the dump for the string comparison below — see selftest.h.  */
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
    assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
5981
757bf1df
DM
/* Implementation detail of ASSERT_DUMP_EQ.
   Dump MODEL (with or without summarization, per SUMMARIZE) and assert
   at LOC that the resulting text equals EXPECTED.  */

static void
assert_dump_eq (const location &loc,
		const region_model &model,
		bool summarize,
		const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT
6004
/* Smoketest for region_model::dump_to_pp.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  /* An empty model should dump identically whether or not
     summarization is requested.  */
  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
}
6026
884d9141
DM
/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
			 const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  /* Build the TYPE_FIELDS chain.  Each iteration prepends, so the chain
     is built in reverse; nreverse below restores declaration order.  */
  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  /* Compute size/layout for the new type.  */
  layout_type (t);
  return t;
}
6055
a96f1c38
DM
/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
			    get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  tree m_x_field;    /* FIELD_DECL for "x".  */
  tree m_y_field;    /* FIELD_DECL for "y".  */
  tree m_coord_type; /* The RECORD_TYPE for "struct coord".  */
};
6076
/* Verify usage of a struct.  */

static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y": laid out one int after "c.x".  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}
6114
/* Verify usage of an array element.  */

static void
test_array_1 ()
{
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
		     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  /* Smoketest: merely check that writing to a[0] doesn't crash;
     there is nothing to assert.  */
  model.set_value (a_0, char_A, NULL);
}
6133
90f7c300
DM
/* Verify that region_model::get_representative_tree works as expected.
   Each case stores a known svalue (or uses a constant) and checks that
   the model can map the svalue back to a printable tree.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    /* A placeholder is used as a recognizable marker value to store.  */
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
6226
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  /* None of the above should have triggered any diagnostics.  */
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
6258
808f4dfe
DM
/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
6284
/* Verify that initial_svalue are handled as expected.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  /* Distinct globals must have distinct initial values...  */
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  /* ...and "initial value of x" must be a singleton per region.  */
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

}
757bf1df 6303
/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}
6341
/* Verify that binops on constant svalues are folded.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  /* Exhaustively check folding of +, -, * on pairs of small constants
     whenever the result stays within [0, NUM_CSTS).  */
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
	if (i != j)
	  ASSERT_NE (cst_sval[i], cst_sval[j]);
	if (i + j < NUM_CSTS)
	  {
	    const svalue *sum
	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (sum, cst_sval[i + j]);
	  }
	if (i - j >= 0)
	  {
	    const svalue *difference
	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (difference, cst_sval[i - j]);
	  }
	if (i * j < NUM_CSTS)
	  {
	    const svalue *product
	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (product, cst_sval[i * j]);
	  }
	/* Comparisons fold to the constants 1 (true) and 0 (false).  */
	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
						    cst_sval[i], cst_sval[j]);
	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
						     cst_sval[i], cst_sval[j]);
	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
	// etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding.  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (boolean_type_node, "v");
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_true);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, &sval_placeholder),
		   &sval_placeholder);
      }
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_false, sval_unknown),
		   sval_false);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, sval_unknown),
		   sval_unknown);
	ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
					    sval_true, &sval_placeholder),
		   &sval_placeholder);
      }
  }
}
6490
/* Verify that sub_svalues are folded as expected.  */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply
     yields an unknown.  */

  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
						    unknown, c_x_reg);
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  /* The folded unknown should carry the field's type, not the struct's.  */
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}
6515
f09b9955
DM
/* Get BIT within VAL as a symbolic value within MGR.
   Helper for test_bits_within_svalue_folding: wraps VAL as an unsigned
   constant and extracts a 1-bit range at offset BIT, as boolean.  */

static const svalue *
get_bit (region_model_manager *mgr,
	 bit_offset_t bit,
	 unsigned HOST_WIDE_INT val)
{
  const svalue *inner_svalue
    = mgr->get_or_create_int_cst (unsigned_type_node, val);
  return mgr->get_or_create_bits_within (boolean_type_node,
					 bit_range (bit, 1),
					 inner_svalue);
}
6529
/* Verify that bits_within_svalues are folded as expected.
   Check each bit of a few 16-bit patterns against the expected
   boolean constant.  */

static void
test_bits_within_svalue_folding ()
{
  region_model_manager mgr;

  const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
  const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);

  /* All bits clear.  */
  {
    const unsigned val = 0x0000;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  /* Only the lowest bit set.  */
  {
    const unsigned val = 0x0001;
    ASSERT_EQ (get_bit (&mgr, 0, val), one);
    for (unsigned bit = 1; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  /* Only the highest bit set.  */
  {
    const unsigned val = 0x8000;
    for (unsigned bit = 0; bit < 15; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
    ASSERT_EQ (get_bit (&mgr, 15, val), one);
  }

  /* All bits set.  */
  {
    const unsigned val = 0xFFFF;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), one);
  }
}
6566
/* Test that region::descendent_of_p works as expected.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  /* A global var's region is a descendent of the globals region.  */
  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}
6592
391512ad
DM
/* Verify that bit_range_region works as expected.  */

static void
test_bit_range_regions ()
{
  tree x = build_global_decl ("x", integer_type_node);
  region_model_manager mgr;
  const region *x_reg = mgr.get_region_for_global (x);
  /* Carve out the first and second bytes of "x" as char-typed views.  */
  const region *byte0
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
  const region *byte1
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
  /* Both are descendents of "x", but distinct from each other.  */
  ASSERT_TRUE (byte0->descendent_of_p (x_reg));
  ASSERT_TRUE (byte1->descendent_of_p (x_reg));
  ASSERT_NE (byte0, byte1);
}
6609
757bf1df
DM
/* Verify that simple assignments work as expected.  */

static void
test_assignment ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* "x == 0", then use of y, then "y = 0;".  */
  region_model_manager mgr;
  region_model model (&mgr);
  ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
  /* Before the assignment, nothing is known about y.  */
  ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
  model.set_value (model.get_lvalue (y, NULL),
		   model.get_rvalue (int_0, NULL),
		   NULL);
  /* Afterwards, y == 0, and hence y == x (since x == 0).  */
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
}
6630
a96f1c38
DM
/* Verify that compound assignments work as expected.  */

static void
test_compound_assignment ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);
  tree d = build_global_decl ("d", ct.m_coord_type);
  tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     d, ct.m_x_field, NULL_TREE);
  tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     d, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Copy c to d, as a single whole-struct assignment.  */
  const svalue *sval = model.get_rvalue (c, NULL);
  model.set_value (model.get_lvalue (d, NULL), sval, NULL);

  /* Check that the fields have the same svalues.  */
  ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
  ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
}
6665
757bf1df
DM
6666/* Verify the details of pushing and popping stack frames. */
6667
6668static void
6669test_stack_frames ()
6670{
6671 tree int_42 = build_int_cst (integer_type_node, 42);
6672 tree int_10 = build_int_cst (integer_type_node, 10);
6673 tree int_5 = build_int_cst (integer_type_node, 5);
6674 tree int_0 = build_int_cst (integer_type_node, 0);
6675
6676 auto_vec <tree> param_types;
6677 tree parent_fndecl = make_fndecl (integer_type_node,
6678 "parent_fn",
6679 param_types);
6680 allocate_struct_function (parent_fndecl, true);
6681
6682 tree child_fndecl = make_fndecl (integer_type_node,
6683 "child_fn",
6684 param_types);
6685 allocate_struct_function (child_fndecl, true);
6686
6687 /* "a" and "b" in the parent frame. */
6688 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6689 get_identifier ("a"),
6690 integer_type_node);
4cebae09 6691 DECL_CONTEXT (a) = parent_fndecl;
757bf1df
DM
6692 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6693 get_identifier ("b"),
6694 integer_type_node);
4cebae09 6695 DECL_CONTEXT (b) = parent_fndecl;
757bf1df
DM
6696 /* "x" and "y" in a child frame. */
6697 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6698 get_identifier ("x"),
6699 integer_type_node);
4cebae09 6700 DECL_CONTEXT (x) = child_fndecl;
757bf1df
DM
6701 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6702 get_identifier ("y"),
6703 integer_type_node);
4cebae09 6704 DECL_CONTEXT (y) = child_fndecl;
757bf1df
DM
6705
6706 /* "p" global. */
6707 tree p = build_global_decl ("p", ptr_type_node);
6708
6709 /* "q" global. */
6710 tree q = build_global_decl ("q", ptr_type_node);
6711
808f4dfe 6712 region_model_manager mgr;
757bf1df 6713 test_region_model_context ctxt;
808f4dfe 6714 region_model model (&mgr);
757bf1df
DM
6715
6716 /* Push stack frame for "parent_fn". */
808f4dfe
DM
6717 const region *parent_frame_reg
6718 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6719 NULL, &ctxt);
6720 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6721 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6722 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6723 model.set_value (a_in_parent_reg,
6724 model.get_rvalue (int_42, &ctxt),
6725 &ctxt);
6726 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6727
757bf1df
DM
6728 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
6729 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6730 tristate (tristate::TS_TRUE));
6731
6732 /* Push stack frame for "child_fn". */
808f4dfe 6733 const region *child_frame_reg
757bf1df 6734 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
6735 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
6736 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
6737 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
6738 model.set_value (x_in_child_reg,
6739 model.get_rvalue (int_0, &ctxt),
6740 &ctxt);
6741 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
6742
757bf1df
DM
6743 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
6744 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
6745 tristate (tristate::TS_TRUE));
6746
6747 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
6748 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
6749 model.set_value (p_in_globals_reg,
6750 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 6751 &ctxt);
808f4dfe 6752 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
6753
6754 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
6755 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
6756 model.set_value (q_in_globals_reg,
6757 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
6758 &ctxt);
6759
808f4dfe
DM
6760 /* Test region::descendent_of_p. */
6761 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
6762 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
6763 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
6764
6765 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
6766 model.pop_frame (NULL, NULL, &ctxt);
6767 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
6768 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
6769
6770 /* Verify that p (which was pointing at the local "x" in the popped
6771 frame) has been poisoned. */
33255ad3 6772 const svalue *new_p_sval = model.get_rvalue (p, NULL);
757bf1df
DM
6773 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
6774 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
6775 POISON_KIND_POPPED_STACK);
6776
6777 /* Verify that q still points to p, in spite of the region
6778 renumbering. */
808f4dfe 6779 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df 6780 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
5932dd35 6781 ASSERT_EQ (new_q_sval->maybe_get_region (),
757bf1df
DM
6782 model.get_lvalue (p, &ctxt));
6783
6784 /* Verify that top of stack has been updated. */
808f4dfe 6785 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
6786
6787 /* Verify locals in parent frame. */
6788 /* Verify "a" still has its value. */
808f4dfe 6789 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
6790 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
6791 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
6792 int_42);
6793 /* Verify "b" still has its constraint. */
6794 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
6795 tristate (tristate::TS_TRUE));
6796}
6797
6798/* Verify that get_representative_path_var works as expected, that
808f4dfe 6799 we can map from regions to parms and back within a recursive call
757bf1df
DM
6800 stack. */
6801
6802static void
6803test_get_representative_path_var ()
6804{
6805 auto_vec <tree> param_types;
6806 tree fndecl = make_fndecl (integer_type_node,
6807 "factorial",
6808 param_types);
6809 allocate_struct_function (fndecl, true);
6810
6811 /* Parm "n". */
6812 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6813 get_identifier ("n"),
6814 integer_type_node);
4cebae09 6815 DECL_CONTEXT (n) = fndecl;
757bf1df 6816
808f4dfe
DM
6817 region_model_manager mgr;
6818 test_region_model_context ctxt;
6819 region_model model (&mgr);
757bf1df
DM
6820
6821 /* Push 5 stack frames for "factorial", each with a param */
808f4dfe
DM
6822 auto_vec<const region *> parm_regs;
6823 auto_vec<const svalue *> parm_svals;
757bf1df
DM
6824 for (int depth = 0; depth < 5; depth++)
6825 {
808f4dfe
DM
6826 const region *frame_n_reg
6827 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
6828 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
6829 parm_regs.safe_push (parm_n_reg);
757bf1df 6830
808f4dfe
DM
6831 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
6832 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
6833 parm_svals.safe_push (sval_n);
757bf1df
DM
6834 }
6835
6836 /* Verify that we can recognize that the regions are the parms,
6837 at every depth. */
6838 for (int depth = 0; depth < 5; depth++)
6839 {
808f4dfe
DM
6840 {
6841 svalue_set visited;
6842 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
6843 &visited),
6844 path_var (n, depth + 1));
6845 }
757bf1df
DM
6846 /* ...and that we can lookup lvalues for locals for all frames,
6847 not just the top. */
6848 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
808f4dfe 6849 parm_regs[depth]);
757bf1df 6850 /* ...and that we can locate the svalues. */
808f4dfe
DM
6851 {
6852 svalue_set visited;
6853 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
6854 &visited),
6855 path_var (n, depth + 1));
6856 }
757bf1df
DM
6857 }
6858}
6859
808f4dfe 6860/* Ensure that region_model::operator== works as expected. */
757bf1df
DM
6861
6862static void
808f4dfe 6863test_equality_1 ()
757bf1df 6864{
808f4dfe
DM
6865 tree int_42 = build_int_cst (integer_type_node, 42);
6866 tree int_17 = build_int_cst (integer_type_node, 17);
757bf1df 6867
808f4dfe
DM
6868/* Verify that "empty" region_model instances are equal to each other. */
6869 region_model_manager mgr;
6870 region_model model0 (&mgr);
6871 region_model model1 (&mgr);
757bf1df 6872 ASSERT_EQ (model0, model1);
808f4dfe
DM
6873
6874 /* Verify that setting state in model1 makes the models non-equal. */
6875 tree x = build_global_decl ("x", integer_type_node);
6876 model0.set_value (x, int_42, NULL);
6877 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6878 ASSERT_NE (model0, model1);
6879
6880 /* Verify the copy-ctor. */
6881 region_model model2 (model0);
6882 ASSERT_EQ (model0, model2);
6883 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
6884 ASSERT_NE (model1, model2);
6885
6886 /* Verify that models obtained from copy-ctor are independently editable
6887 w/o affecting the original model. */
6888 model2.set_value (x, int_17, NULL);
6889 ASSERT_NE (model0, model2);
6890 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
6891 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
757bf1df
DM
6892}
6893
6894/* Verify that region models for
6895 x = 42; y = 113;
6896 and
6897 y = 113; x = 42;
808f4dfe 6898 are equal. */
757bf1df
DM
6899
6900static void
6901test_canonicalization_2 ()
6902{
6903 tree int_42 = build_int_cst (integer_type_node, 42);
6904 tree int_113 = build_int_cst (integer_type_node, 113);
6905 tree x = build_global_decl ("x", integer_type_node);
6906 tree y = build_global_decl ("y", integer_type_node);
6907
808f4dfe
DM
6908 region_model_manager mgr;
6909 region_model model0 (&mgr);
757bf1df
DM
6910 model0.set_value (model0.get_lvalue (x, NULL),
6911 model0.get_rvalue (int_42, NULL),
6912 NULL);
6913 model0.set_value (model0.get_lvalue (y, NULL),
6914 model0.get_rvalue (int_113, NULL),
6915 NULL);
6916
808f4dfe 6917 region_model model1 (&mgr);
757bf1df
DM
6918 model1.set_value (model1.get_lvalue (y, NULL),
6919 model1.get_rvalue (int_113, NULL),
6920 NULL);
6921 model1.set_value (model1.get_lvalue (x, NULL),
6922 model1.get_rvalue (int_42, NULL),
6923 NULL);
6924
757bf1df
DM
6925 ASSERT_EQ (model0, model1);
6926}
6927
6928/* Verify that constraints for
6929 x > 3 && y > 42
6930 and
6931 y > 42 && x > 3
6932 are equal after canonicalization. */
6933
6934static void
6935test_canonicalization_3 ()
6936{
6937 tree int_3 = build_int_cst (integer_type_node, 3);
6938 tree int_42 = build_int_cst (integer_type_node, 42);
6939 tree x = build_global_decl ("x", integer_type_node);
6940 tree y = build_global_decl ("y", integer_type_node);
6941
808f4dfe
DM
6942 region_model_manager mgr;
6943 region_model model0 (&mgr);
757bf1df
DM
6944 model0.add_constraint (x, GT_EXPR, int_3, NULL);
6945 model0.add_constraint (y, GT_EXPR, int_42, NULL);
6946
808f4dfe 6947 region_model model1 (&mgr);
757bf1df
DM
6948 model1.add_constraint (y, GT_EXPR, int_42, NULL);
6949 model1.add_constraint (x, GT_EXPR, int_3, NULL);
6950
808f4dfe
DM
6951 model0.canonicalize ();
6952 model1.canonicalize ();
757bf1df
DM
6953 ASSERT_EQ (model0, model1);
6954}
6955
8c08c983
DM
6956/* Verify that we can canonicalize a model containing NaN and other real
6957 constants. */
6958
6959static void
6960test_canonicalization_4 ()
6961{
6962 auto_vec<tree> csts;
6963 append_interesting_constants (&csts);
6964
808f4dfe
DM
6965 region_model_manager mgr;
6966 region_model model (&mgr);
8c08c983 6967
3f207ab3 6968 for (tree cst : csts)
8c08c983
DM
6969 model.get_rvalue (cst, NULL);
6970
808f4dfe 6971 model.canonicalize ();
8c08c983
DM
6972}
6973
757bf1df
DM
6974/* Assert that if we have two region_model instances
6975 with values VAL_A and VAL_B for EXPR that they are
6976 mergable. Write the merged model to *OUT_MERGED_MODEL,
6977 and the merged svalue ptr to *OUT_MERGED_SVALUE.
6978 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
6979 for that region_model. */
6980
6981static void
6982assert_region_models_merge (tree expr, tree val_a, tree val_b,
808f4dfe
DM
6983 region_model *out_merged_model,
6984 const svalue **out_merged_svalue)
757bf1df 6985{
808f4dfe 6986 region_model_manager *mgr = out_merged_model->get_manager ();
bb8e93eb
DM
6987 program_point point (program_point::origin (*mgr));
6988 test_region_model_context ctxt;
808f4dfe
DM
6989 region_model model0 (mgr);
6990 region_model model1 (mgr);
757bf1df
DM
6991 if (val_a)
6992 model0.set_value (model0.get_lvalue (expr, &ctxt),
6993 model0.get_rvalue (val_a, &ctxt),
6994 &ctxt);
6995 if (val_b)
6996 model1.set_value (model1.get_lvalue (expr, &ctxt),
6997 model1.get_rvalue (val_b, &ctxt),
6998 &ctxt);
6999
7000 /* They should be mergeable. */
808f4dfe
DM
7001 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
7002 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
757bf1df
DM
7003}
7004
7005/* Verify that we can merge region_model instances. */
7006
7007static void
7008test_state_merging ()
7009{
7010 tree int_42 = build_int_cst (integer_type_node, 42);
7011 tree int_113 = build_int_cst (integer_type_node, 113);
7012 tree x = build_global_decl ("x", integer_type_node);
7013 tree y = build_global_decl ("y", integer_type_node);
7014 tree z = build_global_decl ("z", integer_type_node);
7015 tree p = build_global_decl ("p", ptr_type_node);
7016
7017 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
7018 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
7019
7020 auto_vec <tree> param_types;
7021 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
7022 allocate_struct_function (test_fndecl, true);
7023
7024 /* Param "a". */
7025 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7026 get_identifier ("a"),
7027 integer_type_node);
4cebae09 7028 DECL_CONTEXT (a) = test_fndecl;
757bf1df
DM
7029 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
7030
455f58ec
DM
7031 /* Param "q", a pointer. */
7032 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7033 get_identifier ("q"),
7034 ptr_type_node);
4cebae09 7035 DECL_CONTEXT (q) = test_fndecl;
455f58ec 7036
808f4dfe 7037 region_model_manager mgr;
bb8e93eb 7038 program_point point (program_point::origin (mgr));
808f4dfe 7039
757bf1df 7040 {
808f4dfe
DM
7041 region_model model0 (&mgr);
7042 region_model model1 (&mgr);
7043 region_model merged (&mgr);
757bf1df 7044 /* Verify empty models can be merged. */
808f4dfe 7045 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
7046 ASSERT_EQ (model0, merged);
7047 }
7048
7049 /* Verify that we can merge two contradictory constraints on the
7050 value for a global. */
7051 /* TODO: verify that the merged model doesn't have a value for
7052 the global */
7053 {
808f4dfe
DM
7054 region_model model0 (&mgr);
7055 region_model model1 (&mgr);
7056 region_model merged (&mgr);
757bf1df
DM
7057 test_region_model_context ctxt;
7058 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7059 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe 7060 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
7061 ASSERT_NE (model0, merged);
7062 ASSERT_NE (model1, merged);
7063 }
7064
7065 /* Verify handling of a PARM_DECL. */
7066 {
7067 test_region_model_context ctxt;
808f4dfe
DM
7068 region_model model0 (&mgr);
7069 region_model model1 (&mgr);
757bf1df
DM
7070 ASSERT_EQ (model0.get_stack_depth (), 0);
7071 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7072 ASSERT_EQ (model0.get_stack_depth (), 1);
757bf1df
DM
7073 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
7074
808f4dfe
DM
7075 placeholder_svalue test_sval (integer_type_node, "test sval");
7076 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
7077 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
757bf1df
DM
7078 ASSERT_EQ (model0, model1);
7079
757bf1df 7080 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
7081 region_model merged (&mgr);
7082 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 7083 ASSERT_EQ (model0, merged);
808f4dfe
DM
7084 /* In particular, "a" should have the placeholder value. */
7085 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
757bf1df
DM
7086 }
7087
7088 /* Verify handling of a global. */
7089 {
7090 test_region_model_context ctxt;
808f4dfe
DM
7091 region_model model0 (&mgr);
7092 region_model model1 (&mgr);
757bf1df 7093
808f4dfe
DM
7094 placeholder_svalue test_sval (integer_type_node, "test sval");
7095 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7096 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
7097 ASSERT_EQ (model0, model1);
757bf1df
DM
7098
7099 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
7100 region_model merged (&mgr);
7101 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 7102 ASSERT_EQ (model0, merged);
808f4dfe
DM
7103 /* In particular, "x" should have the placeholder value. */
7104 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
757bf1df
DM
7105 }
7106
7107 /* Use global-handling to verify various combinations of values. */
7108
7109 /* Two equal constant values. */
7110 {
808f4dfe
DM
7111 region_model merged (&mgr);
7112 const svalue *merged_x_sval;
757bf1df
DM
7113 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
7114
7115 /* In particular, there should be a constant value for "x". */
7116 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
7117 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
7118 int_42);
7119 }
7120
7121 /* Two non-equal constant values. */
7122 {
808f4dfe
DM
7123 region_model merged (&mgr);
7124 const svalue *merged_x_sval;
757bf1df
DM
7125 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
7126
808f4dfe
DM
7127 /* In particular, there should be a "widening" value for "x". */
7128 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
757bf1df
DM
7129 }
7130
808f4dfe 7131 /* Initial and constant. */
757bf1df 7132 {
808f4dfe
DM
7133 region_model merged (&mgr);
7134 const svalue *merged_x_sval;
757bf1df
DM
7135 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
7136
7137 /* In particular, there should be an unknown value for "x". */
7138 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7139 }
7140
808f4dfe 7141 /* Constant and initial. */
757bf1df 7142 {
808f4dfe
DM
7143 region_model merged (&mgr);
7144 const svalue *merged_x_sval;
757bf1df
DM
7145 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
7146
7147 /* In particular, there should be an unknown value for "x". */
7148 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7149 }
7150
7151 /* Unknown and constant. */
7152 // TODO
7153
7154 /* Pointers: NULL and NULL. */
7155 // TODO
7156
7157 /* Pointers: NULL and non-NULL. */
7158 // TODO
7159
7160 /* Pointers: non-NULL and non-NULL: ptr to a local. */
7161 {
808f4dfe 7162 region_model model0 (&mgr);
757bf1df 7163 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
757bf1df
DM
7164 model0.set_value (model0.get_lvalue (p, NULL),
7165 model0.get_rvalue (addr_of_a, NULL), NULL);
7166
7167 region_model model1 (model0);
7168 ASSERT_EQ (model0, model1);
7169
7170 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
7171 region_model merged (&mgr);
7172 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
7173 ASSERT_EQ (model0, merged);
7174 }
7175
7176 /* Pointers: non-NULL and non-NULL: ptr to a global. */
7177 {
808f4dfe 7178 region_model merged (&mgr);
757bf1df 7179 /* p == &y in both input models. */
808f4dfe 7180 const svalue *merged_p_sval;
757bf1df
DM
7181 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
7182 &merged_p_sval);
7183
7184 /* We should get p == &y in the merged model. */
7185 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
808f4dfe
DM
7186 const region_svalue *merged_p_ptr
7187 = merged_p_sval->dyn_cast_region_svalue ();
7188 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
7189 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
757bf1df
DM
7190 }
7191
7192 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
7193 {
808f4dfe
DM
7194 region_model merged (&mgr);
7195 /* x == &y vs x == &z in the input models; these are actually casts
7196 of the ptrs to "int". */
7197 const svalue *merged_x_sval;
7198 // TODO:
757bf1df
DM
7199 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
7200 &merged_x_sval);
7201
7202 /* We should get x == unknown in the merged model. */
7203 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
7204 }
7205
7206 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
7207 {
7208 test_region_model_context ctxt;
808f4dfe 7209 region_model model0 (&mgr);
9a2c9579 7210 tree size = build_int_cst (size_type_node, 1024);
808f4dfe 7211 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
b9365b93 7212 const region *new_reg
ce917b04 7213 = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe 7214 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
757bf1df 7215 model0.set_value (model0.get_lvalue (p, &ctxt),
808f4dfe 7216 ptr_sval, &ctxt);
757bf1df
DM
7217
7218 region_model model1 (model0);
7219
7220 ASSERT_EQ (model0, model1);
7221
808f4dfe
DM
7222 region_model merged (&mgr);
7223 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 7224
808f4dfe 7225 /* The merged model ought to be identical. */
757bf1df
DM
7226 ASSERT_EQ (model0, merged);
7227 }
7228
808f4dfe
DM
7229 /* Two regions sharing the same placeholder svalue should continue sharing
7230 it after self-merger. */
757bf1df
DM
7231 {
7232 test_region_model_context ctxt;
808f4dfe
DM
7233 region_model model0 (&mgr);
7234 placeholder_svalue placeholder_sval (integer_type_node, "test");
7235 model0.set_value (model0.get_lvalue (x, &ctxt),
7236 &placeholder_sval, &ctxt);
7237 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
757bf1df
DM
7238 region_model model1 (model0);
7239
7240 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
7241 region_model merged (&mgr);
7242 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
7243 ASSERT_EQ (model0, merged);
7244
7245 /* In particular, we should have x == y. */
7246 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
7247 tristate (tristate::TS_TRUE));
7248 }
7249
757bf1df 7250 {
808f4dfe
DM
7251 region_model model0 (&mgr);
7252 region_model model1 (&mgr);
757bf1df
DM
7253 test_region_model_context ctxt;
7254 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7255 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
808f4dfe
DM
7256 region_model merged (&mgr);
7257 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
7258 }
7259
7260 {
808f4dfe
DM
7261 region_model model0 (&mgr);
7262 region_model model1 (&mgr);
757bf1df
DM
7263 test_region_model_context ctxt;
7264 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
7265 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
7266 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe
DM
7267 region_model merged (&mgr);
7268 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 7269 }
757bf1df
DM
7270
7271 // TODO: what can't we merge? need at least one such test
7272
7273 /* TODO: various things
7274 - heap regions
7275 - value merging:
7276 - every combination, but in particular
808f4dfe 7277 - pairs of regions
757bf1df
DM
7278 */
7279
7280 /* Views. */
7281 {
7282 test_region_model_context ctxt;
808f4dfe 7283 region_model model0 (&mgr);
757bf1df 7284
808f4dfe
DM
7285 const region *x_reg = model0.get_lvalue (x, &ctxt);
7286 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
757bf1df
DM
7287 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
7288
7289 region_model model1 (model0);
7290 ASSERT_EQ (model1, model0);
7291
7292 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
7293 region_model merged (&mgr);
7294 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 7295 }
455f58ec
DM
7296
7297 /* Verify that we can merge a model in which a local in an older stack
7298 frame points to a local in a more recent stack frame. */
7299 {
808f4dfe 7300 region_model model0 (&mgr);
455f58ec 7301 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
808f4dfe 7302 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
455f58ec
DM
7303
7304 /* Push a second frame. */
808f4dfe 7305 const region *reg_2nd_frame
455f58ec
DM
7306 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7307
7308 /* Have a pointer in the older frame point to a local in the
7309 more recent frame. */
808f4dfe
DM
7310 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
7311 model0.set_value (q_in_first_frame, sval_ptr, NULL);
455f58ec
DM
7312
7313 /* Verify that it's pointing at the newer frame. */
5932dd35 7314 const region *reg_pointee = sval_ptr->maybe_get_region ();
808f4dfe 7315 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
455f58ec 7316
808f4dfe 7317 model0.canonicalize ();
455f58ec
DM
7318
7319 region_model model1 (model0);
7320 ASSERT_EQ (model0, model1);
7321
7322 /* They should be mergeable, and the result should be the same
7323 (after canonicalization, at least). */
808f4dfe
DM
7324 region_model merged (&mgr);
7325 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
7326 merged.canonicalize ();
455f58ec
DM
7327 ASSERT_EQ (model0, merged);
7328 }
7329
7330 /* Verify that we can merge a model in which a local points to a global. */
7331 {
808f4dfe 7332 region_model model0 (&mgr);
455f58ec
DM
7333 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
7334 model0.set_value (model0.get_lvalue (q, NULL),
7335 model0.get_rvalue (addr_of_y, NULL), NULL);
7336
455f58ec
DM
7337 region_model model1 (model0);
7338 ASSERT_EQ (model0, model1);
7339
7340 /* They should be mergeable, and the result should be the same
7341 (after canonicalization, at least). */
808f4dfe
DM
7342 region_model merged (&mgr);
7343 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
455f58ec
DM
7344 ASSERT_EQ (model0, merged);
7345 }
757bf1df
DM
7346}
7347
7348/* Verify that constraints are correctly merged when merging region_model
7349 instances. */
7350
7351static void
7352test_constraint_merging ()
7353{
7354 tree int_0 = build_int_cst (integer_type_node, 0);
7355 tree int_5 = build_int_cst (integer_type_node, 5);
7356 tree x = build_global_decl ("x", integer_type_node);
7357 tree y = build_global_decl ("y", integer_type_node);
7358 tree z = build_global_decl ("z", integer_type_node);
7359 tree n = build_global_decl ("n", integer_type_node);
7360
808f4dfe 7361 region_model_manager mgr;
757bf1df
DM
7362 test_region_model_context ctxt;
7363
7364 /* model0: 0 <= (x == y) < n. */
808f4dfe 7365 region_model model0 (&mgr);
757bf1df
DM
7366 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
7367 model0.add_constraint (x, GE_EXPR, int_0, NULL);
7368 model0.add_constraint (x, LT_EXPR, n, NULL);
7369
7370 /* model1: z != 5 && (0 <= x < n). */
808f4dfe 7371 region_model model1 (&mgr);
757bf1df
DM
7372 model1.add_constraint (z, NE_EXPR, int_5, NULL);
7373 model1.add_constraint (x, GE_EXPR, int_0, NULL);
7374 model1.add_constraint (x, LT_EXPR, n, NULL);
7375
7376 /* They should be mergeable; the merged constraints should
7377 be: (0 <= x < n). */
bb8e93eb 7378 program_point point (program_point::origin (mgr));
808f4dfe
DM
7379 region_model merged (&mgr);
7380 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
7381
7382 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
7383 tristate (tristate::TS_TRUE));
7384 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
7385 tristate (tristate::TS_TRUE));
7386
7387 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
7388 tristate (tristate::TS_UNKNOWN));
7389 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
7390 tristate (tristate::TS_UNKNOWN));
7391}
7392
808f4dfe
DM
7393/* Verify that widening_svalue::eval_condition_without_cm works as
7394 expected. */
7395
7396static void
7397test_widening_constraints ()
7398{
bb8e93eb 7399 region_model_manager mgr;
e6fe02d8 7400 function_point point (program_point::origin (mgr).get_function_point ());
808f4dfe
DM
7401 tree int_0 = build_int_cst (integer_type_node, 0);
7402 tree int_m1 = build_int_cst (integer_type_node, -1);
7403 tree int_1 = build_int_cst (integer_type_node, 1);
7404 tree int_256 = build_int_cst (integer_type_node, 256);
808f4dfe
DM
7405 test_region_model_context ctxt;
7406 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
7407 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
7408 const svalue *w_zero_then_one_sval
7409 = mgr.get_or_create_widening_svalue (integer_type_node, point,
7410 int_0_sval, int_1_sval);
7411 const widening_svalue *w_zero_then_one
7412 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
7413 ASSERT_EQ (w_zero_then_one->get_direction (),
7414 widening_svalue::DIR_ASCENDING);
7415 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
7416 tristate::TS_FALSE);
7417 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
7418 tristate::TS_FALSE);
7419 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
7420 tristate::TS_UNKNOWN);
7421 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
7422 tristate::TS_UNKNOWN);
7423
7424 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
7425 tristate::TS_FALSE);
7426 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
7427 tristate::TS_UNKNOWN);
7428 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
7429 tristate::TS_UNKNOWN);
7430 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
7431 tristate::TS_UNKNOWN);
7432
7433 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
7434 tristate::TS_TRUE);
7435 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
7436 tristate::TS_UNKNOWN);
7437 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
7438 tristate::TS_UNKNOWN);
7439 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
7440 tristate::TS_UNKNOWN);
7441
7442 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
7443 tristate::TS_TRUE);
7444 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
7445 tristate::TS_TRUE);
7446 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
7447 tristate::TS_UNKNOWN);
7448 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
7449 tristate::TS_UNKNOWN);
7450
7451 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
7452 tristate::TS_FALSE);
7453 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
7454 tristate::TS_UNKNOWN);
7455 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
7456 tristate::TS_UNKNOWN);
7457 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
7458 tristate::TS_UNKNOWN);
7459
7460 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
7461 tristate::TS_TRUE);
7462 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
7463 tristate::TS_UNKNOWN);
7464 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
7465 tristate::TS_UNKNOWN);
7466 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
7467 tristate::TS_UNKNOWN);
7468}
7469
/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
    i_15 = 0;
    goto <bb 4>;

   <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

   <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
         [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
        F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Merging the first two iterations should merge "i" to an ascending
     widened value, rather than enumerating concrete values forever.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256 (the loop guard, on the true edge).  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}
7585
6969ac30
DM
7586/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
7587 all cast pointers to that region are also known to be non-NULL. */
7588
7589static void
7590test_malloc_constraints ()
7591{
808f4dfe
DM
7592 region_model_manager mgr;
7593 region_model model (&mgr);
6969ac30
DM
7594 tree p = build_global_decl ("p", ptr_type_node);
7595 tree char_star = build_pointer_type (char_type_node);
7596 tree q = build_global_decl ("q", char_star);
7597 tree null_ptr = build_int_cst (ptr_type_node, 0);
7598
808f4dfe 7599 const svalue *size_in_bytes
9a2c9579 7600 = mgr.get_or_create_unknown_svalue (size_type_node);
ce917b04
DM
7601 const region *reg
7602 = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
808f4dfe
DM
7603 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
7604 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6969ac30
DM
7605 model.set_value (q, p, NULL);
7606
6969ac30
DM
7607 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
7608 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
7609 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
7610 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
7611
7612 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
7613
6969ac30
DM
7614 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
7615 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
7616 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
7617 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
7618}
7619
808f4dfe
DM
7620/* Smoketest of getting and setting the value of a variable. */
7621
7622static void
7623test_var ()
7624{
7625 /* "int i;" */
7626 tree i = build_global_decl ("i", integer_type_node);
7627
7628 tree int_17 = build_int_cst (integer_type_node, 17);
7629 tree int_m3 = build_int_cst (integer_type_node, -3);
7630
7631 region_model_manager mgr;
7632 region_model model (&mgr);
7633
7634 const region *i_reg = model.get_lvalue (i, NULL);
7635 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
7636
7637 /* Reading "i" should give a symbolic "initial value". */
7638 const svalue *sval_init = model.get_rvalue (i, NULL);
7639 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
7640 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
7641 /* ..and doing it again should give the same "initial value". */
7642 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
7643
7644 /* "i = 17;". */
7645 model.set_value (i, int_17, NULL);
7646 ASSERT_EQ (model.get_rvalue (i, NULL),
7647 model.get_rvalue (int_17, NULL));
7648
7649 /* "i = -3;". */
7650 model.set_value (i, int_m3, NULL);
7651 ASSERT_EQ (model.get_rvalue (i, NULL),
7652 model.get_rvalue (int_m3, NULL));
7653
7654 /* Verify get_offset for "i". */
7655 {
7a6564c9 7656 region_offset offset = i_reg->get_offset (&mgr);
808f4dfe
DM
7657 ASSERT_EQ (offset.get_base_region (), i_reg);
7658 ASSERT_EQ (offset.get_bit_offset (), 0);
7659 }
7660}
7661
/* Smoketest of reads and writes of array elements, with both constant
   and symbolic indices, including clobbering of earlier bindings.  */

static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  /* ARRAY_REFs for "arr[0]", "arr[1]", and the symbolic "arr[i]".  */
  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;"  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;"  */
  model.set_value (arr_1, int_m3, NULL);

  /* Both bindings should be readable back.  */
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]": concrete bit offset 0.  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]": one int's worth of bits in.  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]": the byte offset is symbolic,
     and should be a binop svalue.  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}
7736
7737/* Smoketest of dereferencing a pointer via MEM_REF. */
7738
7739static void
7740test_mem_ref ()
7741{
7742 /*
7743 x = 17;
7744 p = &x;
7745 *p;
7746 */
7747 tree x = build_global_decl ("x", integer_type_node);
7748 tree int_star = build_pointer_type (integer_type_node);
7749 tree p = build_global_decl ("p", int_star);
7750
7751 tree int_17 = build_int_cst (integer_type_node, 17);
7752 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
7753 tree offset_0 = build_int_cst (integer_type_node, 0);
7754 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
7755
7756 region_model_manager mgr;
7757 region_model model (&mgr);
7758
7759 /* "x = 17;". */
7760 model.set_value (x, int_17, NULL);
7761
7762 /* "p = &x;". */
7763 model.set_value (p, addr_of_x, NULL);
7764
7765 const svalue *sval = model.get_rvalue (star_p, NULL);
7766 ASSERT_EQ (sval->maybe_get_constant (), int_17);
7767}
7768
7769/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
7770 Analogous to this code:
7771 void test_6 (int a[10])
7772 {
7773 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7774 a[3] = 42;
7775 __analyzer_eval (a[3] == 42); [should be TRUE]
7776 }
7777 from data-model-1.c, which looks like this at the gimple level:
7778 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
7779 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
7780 int _2 = *_1; # MEM_REF
7781 _Bool _3 = _2 == 42;
7782 int _4 = (int) _3;
7783 __analyzer_eval (_4);
7784
7785 # a[3] = 42;
7786 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
7787 *_5 = 42; # MEM_REF
7788
7789 # __analyzer_eval (a[3] == 42); [should be TRUE]
7790 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
7791 int _7 = *_6; # MEM_REF
7792 _Bool _8 = _7 == 42;
7793 int _9 = (int) _8;
7794 __analyzer_eval (_9); */
7795
7796static void
7797test_POINTER_PLUS_EXPR_then_MEM_REF ()
7798{
7799 tree int_star = build_pointer_type (integer_type_node);
7800 tree a = build_global_decl ("a", int_star);
7801 tree offset_12 = build_int_cst (size_type_node, 12);
7802 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
7803 tree offset_0 = build_int_cst (integer_type_node, 0);
7804 tree mem_ref = build2 (MEM_REF, integer_type_node,
7805 pointer_plus_expr, offset_0);
7806 region_model_manager mgr;
7807 region_model m (&mgr);
7808
7809 tree int_42 = build_int_cst (integer_type_node, 42);
7810 m.set_value (mem_ref, int_42, NULL);
7811 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
7812}
7813
7814/* Verify that malloc works. */
7815
7816static void
7817test_malloc ()
7818{
7819 tree int_star = build_pointer_type (integer_type_node);
7820 tree p = build_global_decl ("p", int_star);
7821 tree n = build_global_decl ("n", integer_type_node);
7822 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
7823 n, build_int_cst (size_type_node, 4));
7824
7825 region_model_manager mgr;
7826 test_region_model_context ctxt;
7827 region_model model (&mgr);
7828
7829 /* "p = malloc (n * 4);". */
7830 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
ce917b04
DM
7831 const region *reg
7832 = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
808f4dfe
DM
7833 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
7834 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 7835 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
7836}
7837
7838/* Verify that alloca works. */
7839
7840static void
7841test_alloca ()
7842{
7843 auto_vec <tree> param_types;
7844 tree fndecl = make_fndecl (integer_type_node,
7845 "test_fn",
7846 param_types);
7847 allocate_struct_function (fndecl, true);
7848
7849
7850 tree int_star = build_pointer_type (integer_type_node);
7851 tree p = build_global_decl ("p", int_star);
7852 tree n = build_global_decl ("n", integer_type_node);
7853 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
7854 n, build_int_cst (size_type_node, 4));
7855
7856 region_model_manager mgr;
7857 test_region_model_context ctxt;
7858 region_model model (&mgr);
7859
7860 /* Push stack frame. */
7861 const region *frame_reg
7862 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
7863 NULL, &ctxt);
7864 /* "p = alloca (n * 4);". */
7865 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
b9365b93 7866 const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
808f4dfe
DM
7867 ASSERT_EQ (reg->get_parent_region (), frame_reg);
7868 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
7869 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
9a2c9579 7870 ASSERT_EQ (model.get_capacity (reg), size_sval);
808f4dfe
DM
7871
7872 /* Verify that the pointers to the alloca region are replaced by
7873 poisoned values when the frame is popped. */
7874 model.pop_frame (NULL, NULL, &ctxt);
33255ad3 7875 ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
808f4dfe
DM
7876}
7877
71fc4655
DM
7878/* Verify that svalue::involves_p works. */
7879
7880static void
7881test_involves_p ()
7882{
7883 region_model_manager mgr;
7884 tree int_star = build_pointer_type (integer_type_node);
7885 tree p = build_global_decl ("p", int_star);
7886 tree q = build_global_decl ("q", int_star);
7887
7888 test_region_model_context ctxt;
7889 region_model model (&mgr);
7890 const svalue *p_init = model.get_rvalue (p, &ctxt);
7891 const svalue *q_init = model.get_rvalue (q, &ctxt);
7892
7893 ASSERT_TRUE (p_init->involves_p (p_init));
7894 ASSERT_FALSE (p_init->involves_p (q_init));
7895
7896 const region *star_p_reg = mgr.get_symbolic_region (p_init);
7897 const region *star_q_reg = mgr.get_symbolic_region (q_init);
7898
7899 const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
7900 const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);
7901
7902 ASSERT_TRUE (init_star_p->involves_p (p_init));
7903 ASSERT_FALSE (p_init->involves_p (init_star_p));
7904 ASSERT_FALSE (init_star_p->involves_p (q_init));
7905 ASSERT_TRUE (init_star_q->involves_p (q_init));
7906 ASSERT_FALSE (init_star_q->involves_p (p_init));
7907}
7908
/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  /* Basics: tree comparison, dumping, compound values.  */
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  /* svalue uniqueness and folding.  */
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  /* Assignments and stack frames.  */
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  /* State merging, widening, and loop iteration.  */
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  /* Reads/writes of variables, arrays, and pointers.  */
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  /* Dynamic allocation.  */
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}
7949
7950} // namespace selftest
7951
7952#endif /* CHECKING_P */
7953
75038aa6
DM
7954} // namespace ana
7955
757bf1df 7956#endif /* #if ENABLE_ANALYZER */