/* Classes for modeling the state of memory.
   Copyright (C) 2019-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"
#include "analyzer/checker-event.h"
#include "analyzer/checker-path.h"
#include "analyzer/feasible-graph.h"

#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}

/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
	return false;
      if (sval != *other_slot)
	return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}


/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
				       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
	{
	  if (iter_sval == *other_slot)
	    out->put (iter_reg, iter_sval);
	  else
	    return false;
	}
    }
  return true;
}
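
/* For example (an illustrative sketch, not from the sources): merging
     {r1: A, r2: B} with {r1: A}
   succeeds, writing the intersection {r1: A} to OUT (r2 is dropped),
   whereas merging
     {r1: A, r2: B} with {r1: A, r2: C}
   is rejected, since r2 is bound to conflicting values.  */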

/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
	to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region,
			     tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override
  {
    return "poisoned_value_diagnostic";
  }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool terminate_path_p () const final override { return true; }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  diagnostic_metadata m;
	  m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use of uninitialized value %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m, get_controlling_option (),
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc, get_controlling_option (),
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode,
			    const gimple *emission_stmt)
    const final override
  {
    if (!m_check_expr)
      return true;

    /* We've reached the enode, but not necessarily the right function_point.
       Try to get the state at the correct stmt.  */
    region_model emission_model (fnode.get_model ().get_manager ());
    if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
      /* Couldn't get state; accept this diagnostic.  */
      return true;

    const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULL);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue *fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
  tree m_check_expr;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
			       m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "shift by count (%qE) >= precision of type (%qi)",
		       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};

/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR;  */
    case VAR_DECL: /* LHS = VAR;  */
    case PARM_DECL: /* LHS = VAR;  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number
	       of bits or by greater than or equal to the number of bits
	       that exist in the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */
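
/* For instance (an illustrative sketch, not from the testsuite), with
   optimization enabled:

     int a = 1;
     int b;
     if (a || b)
       do_something ();

   can be gimplified so that the uninitialized "b" is read via
   "tmp = a | b", even though "a" being true would have short-circuited
   the condition in the unoptimized code; we don't want to warn about
   "b" here.  */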

static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				const region *src_region,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);

      /* Can we reliably get the poisoned value from "expr"?
	 This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
	 Unfortunately, we might not have a reliable value for EXPR.
	 Hence we only query its value now, and only use it if we get the
	 poisoned value back again.  */
      tree check_expr = expr;
      const svalue *foo_sval = get_rvalue (expr, NULL);
      if (foo_sval == sval)
	check_expr = expr;
      else
	check_expr = NULL;
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region,
							      check_expr)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}

/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
	expr = decl;
      else
	return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized
	   by anything, for which we don't have a function body, or for which
	   we don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);
	*out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}

/* Given a call CD with function attribute FORMAT_ATTR, check that the
   format arg to the call is a valid null-terminated string.  */
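
/* For example (an illustrative sketch; the declaration is hypothetical),
   given:

     extern int log_msg (const char *fmt, ...)
       __attribute__ ((format (printf, 1, 2)));

   arg0 of the attribute is "printf" and arg1 is 1, so for a call to
   log_msg we check that argument 0 is a valid null-terminated string.  */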

void
region_model::check_call_format_attr (const call_details &cd,
				      tree format_attr) const
{
  /* We assume that FORMAT_ATTR has already been validated.  */

  /* arg0 of the attribute should be the kind of format strings
     that this function expects (e.g. "printf").  */
  const tree arg0_tree_list = TREE_VALUE (format_attr);
  if (!arg0_tree_list)
    return;

  /* arg1 of the attribute should be the 1-based parameter index
     to treat as the format string.  */
  const tree arg1_tree_list = TREE_CHAIN (arg0_tree_list);
  if (!arg1_tree_list)
    return;
  const tree arg1_value = TREE_VALUE (arg1_tree_list);
  if (!arg1_value)
    return;

  unsigned format_arg_idx = TREE_INT_CST_LOW (arg1_value) - 1;
  if (cd.num_args () <= format_arg_idx)
    return;

  /* Subclass of annotating_context that
     adds a note about the format attr to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
		     unsigned fmt_param_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_fmt_param_idx (fmt_param_idx)
    {
    }
    void add_annotations () final override
    {
      class reason_format_attr
	: public pending_note_subclass<reason_format_attr>
      {
      public:
	reason_format_attr (const call_arg_details &arg_details)
	: m_arg_details (arg_details)
	{
	}

	const char *get_kind () const final override
	{
	  return "reason_format_attr";
	}

	void emit () const final override
	{
	  inform (DECL_SOURCE_LOCATION (m_arg_details.m_called_fndecl),
		  "parameter %i of %qD marked as a format string"
		  " via %qs attribute",
		  m_arg_details.m_arg_idx + 1, m_arg_details.m_called_fndecl,
		  "format");
	}

	bool operator== (const reason_format_attr &other) const
	{
	  return m_arg_details == other.m_arg_details;
	}

      private:
	call_arg_details m_arg_details;
      };

      call_arg_details arg_details (m_cd, m_fmt_param_idx);
      add_note (make_unique<reason_format_attr> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_fmt_param_idx;
  };

  annotating_ctxt my_ctxt (cd, format_arg_idx);
  call_details my_cd (cd, &my_ctxt);
  my_cd.check_for_null_terminated_string_arg (format_arg_idx);
}

/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.

   Check that calls to functions with "format" attribute have valid
   null-terminated strings for their format argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);

  /* Handle attribute "format".  */
  if (tree format_attr = cd.lookup_function_attribute ("format"))
    check_call_format_attr (cd, format_attr);
}

/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
					 int retval,
					 bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.
   Specifically, assign an svalue to the LHS, and add a constraint that
   that svalue is non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  cd.set_any_lhs_with_defaults ();
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}

/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */
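
/* E.g. (a sketch of the shape we simplify): given a size of the form
     MIN_EXPR <len_5, 128>
   the second argument is the constant 128, so we return that constant,
   cast to the type of the original size value.  */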

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
			    region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
	{
	  return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
	  /* TODO: we might want to also capture the constraint
	     when recording the diagnostic, or note that we're using
	     the upper bound.  */
	}
  return NULL;
}

/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}

/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false; /* No side effects.  */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false; /* No further side effects.  */
      }

  if (!callee_fndecl)
    {
      cd.set_any_lhs_with_defaults ();
      return true; /* Unknown side effects.  */
    }

  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects.  */
    }

  cd.set_any_lhs_with_defaults ();

  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects.  */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  if (!fndecl_has_gimple_body_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  return false; /* No side effects.  */
}

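/* A sketch of the classification above: a call handled by a
   known_function is modelled by its impl_call_pre and has no further
   side effects; a call to a function with the ECF_CONST or ECF_PURE
   flags merely computes a value, so there are no side effects to model;
   and a call to an undefined external function is treated as having
   unknown side effects, to be purged by handle_unrecognized_call from
   on_call_post.  */
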
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}

/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}

/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};

/* Check CALL, a call to external function CALLEE_FNDECL, based on
   any __attribute__ ((access, ...)) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */
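
/* For example (an illustrative sketch; the declaration is hypothetical),
   given:

     extern int fill_buf (char *buf, size_t sz)
       __attribute__ ((access (write_only, 1, 2)));

   parameter 1 is marked write_only (with parameter 2 as its size), so at
   each call to fill_buf we check the region pointed to by argument 1 for
   writability.  */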
1699
1700void
1701region_model::
1702check_external_function_for_access_attr (const gcall *call,
1703 tree callee_fndecl,
1704 region_model_context *ctxt) const
1705{
1706 gcc_assert (call);
1707 gcc_assert (callee_fndecl);
1708 gcc_assert (ctxt);
1709
1710 tree fntype = TREE_TYPE (callee_fndecl);
1711 if (!fntype)
1712 return;
1713
1714 if (!TYPE_ATTRIBUTES (fntype))
1715 return;
1716
1717 /* Initialize a map of attribute access specifications for arguments
1718 to the function call. */
1719 rdwr_map rdwr_idx;
1720 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
1721
1722 unsigned argno = 0;
1723
1724 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
1725 iter = TREE_CHAIN (iter), ++argno)
1726 {
1727 const attr_access* access = rdwr_idx.get (argno);
1728 if (!access)
1729 continue;
1730
1731 /* Ignore any duplicate entry in the map for the size argument. */
1732 if (access->ptrarg != argno)
1733 continue;
1734
1735 if (access->mode == access_write_only
1736 || access->mode == access_read_write)
1737 {
e40a935d 1738 /* Subclass of annotating_context that
c65d3c7f 1739 adds a note about the attr access to any saved diagnostics. */
e40a935d 1740 class annotating_ctxt : public annotating_context
c65d3c7f
DM
1741 {
1742 public:
1743 annotating_ctxt (tree callee_fndecl,
1744 const attr_access &access,
1745 region_model_context *ctxt)
e40a935d 1746 : annotating_context (ctxt),
c65d3c7f
DM
1747 m_callee_fndecl (callee_fndecl),
1748 m_access (access)
1749 {
1750 }
e40a935d 1751 void add_annotations () final override
c65d3c7f 1752 {
e40a935d
DM
1753 add_note (make_unique<reason_attr_access>
1754 (m_callee_fndecl, m_access));
c65d3c7f
DM
1755 }
1756 private:
1757 tree m_callee_fndecl;
1758 const attr_access &m_access;
1759 };
1760
1761 /* Use this ctxt below so that any diagnostics get the
1762 note added to them. */
1763 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
1764
b6eaf90c 1765 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
c65d3c7f
DM
1766 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
1767 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
0e466e97 1768 check_region_for_write (reg, nullptr, &my_ctxt);
b6eaf90c
DM
1769 /* We don't use the size arg for now. */
1770 }
1771 }
1772}
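
/* As an illustrative sketch (hypothetical code, not from the testsuite):
   given a declaration such as

     extern void fill_buf (char *buf, size_t n)
       __attribute__ ((access (write_only, 1, 2)));

   a call "fill_buf (p, 16)" leads to the region pointed to by "p"
   being checked for writability, with reason_attr_access adding a
   note about the attribute to any saved diagnostic.  */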
1773
ef7827b0
DM
1774/* Handle a call CALL to a function with unknown behavior.
1775
1776 Traverse the regions in this model, determining what regions are
1777 reachable from pointer arguments to CALL and from global variables,
1778 recursively.
1779
1780 Set all reachable regions to new unknown values and purge sm-state
1781 from their values, and from values that point to them. */
1782
1783void
1784region_model::handle_unrecognized_call (const gcall *call,
1785 region_model_context *ctxt)
1786{
1787 tree fndecl = get_fndecl_for_call (call, ctxt);
1788
b6eaf90c
DM
1789 if (fndecl && ctxt)
1790 check_external_function_for_access_attr (call, fndecl, ctxt);
1791
c710051a 1792 reachable_regions reachable_regs (this);
ef7827b0
DM
1793
1794 /* Determine the reachable regions and their mutability. */
1795 {
808f4dfe
DM
1796 /* Add globals and regions that already escaped in previous
1797 unknown calls. */
1798 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1799 &reachable_regs);
ef7827b0
DM
1800
1801 /* Params that are pointers. */
1802 tree iter_param_types = NULL_TREE;
1803 if (fndecl)
1804 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
1805 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
1806 {
1807 /* Track expected param type, where available. */
1808 tree param_type = NULL_TREE;
1809 if (iter_param_types)
1810 {
1811 param_type = TREE_VALUE (iter_param_types);
1812 gcc_assert (param_type);
1813 iter_param_types = TREE_CHAIN (iter_param_types);
1814 }
1815
1816 tree parm = gimple_call_arg (call, arg_idx);
808f4dfe
DM
1817 const svalue *parm_sval = get_rvalue (parm, ctxt);
1818 reachable_regs.handle_parm (parm_sval, param_type);
ef7827b0
DM
1819 }
1820 }
1821
33255ad3 1822 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
3a66c289 1823
808f4dfe
DM
1824 /* Purge sm-state for the svalues that were reachable,
1825 both in non-mutable and mutable form. */
1826 for (svalue_set::iterator iter
1827 = reachable_regs.begin_reachable_svals ();
1828 iter != reachable_regs.end_reachable_svals (); ++iter)
ef7827b0 1829 {
808f4dfe 1830 const svalue *sval = (*iter);
33255ad3
DM
1831 if (ctxt)
1832 ctxt->on_unknown_change (sval, false);
808f4dfe
DM
1833 }
1834 for (svalue_set::iterator iter
1835 = reachable_regs.begin_mutable_svals ();
1836 iter != reachable_regs.end_mutable_svals (); ++iter)
1837 {
1838 const svalue *sval = (*iter);
33255ad3
DM
1839 if (ctxt)
1840 ctxt->on_unknown_change (sval, true);
3a66c289
DM
1841 if (uncertainty)
1842 uncertainty->on_mutable_sval_at_unknown_call (sval);
808f4dfe 1843 }
ef7827b0 1844
808f4dfe 1845 /* Mark any clusters that have escaped. */
af66094d 1846 reachable_regs.mark_escaped_clusters (ctxt);
ef7827b0 1847
808f4dfe
DM
1848 /* Update bindings for all clusters that have escaped, whether above,
1849 or previously. */
3734527d
DM
1850 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
1851 conjured_purge (this, ctxt));
9a2c9579
DM
1852
1853 /* Purge dynamic extents from any regions that have escaped mutably:
1854 realloc could have been called on them. */
1855 for (hash_set<const region *>::iterator
1856 iter = reachable_regs.begin_mutable_base_regs ();
1857 iter != reachable_regs.end_mutable_base_regs ();
1858 ++iter)
1859 {
1860 const region *base_reg = (*iter);
1861 unset_dynamic_extents (base_reg);
1862 }
808f4dfe 1863}
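
/* For example (an illustrative sketch; "unknown_fn" is hypothetical):

     int g;
     extern void unknown_fn (int *);

     int test (void)
     {
       int local = 42;
       unknown_fn (&local);
       return local + g;
     }

   "local" escapes via the pointer argument and "g" is a global, so
   both clusters are reachable; after the call neither is assumed to
   hold its old value, and both are rebound to conjured svalues.  */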
ef7827b0 1864
808f4dfe
DM
1865/* Traverse the regions in this model, determining what regions are
1866 reachable from the store and populating *OUT.
ef7827b0 1867
808f4dfe
DM
1868 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
1869 for reachability (for handling return values from functions when
1870 analyzing return of the only function on the stack).
1871
3a66c289
DM
1872 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
1873 within it as being maybe-bound as additional "roots" for reachability.
1874
808f4dfe
DM
1875 Find svalues that haven't leaked. */
1876
1877void
1878region_model::get_reachable_svalues (svalue_set *out,
3a66c289
DM
1879 const svalue *extra_sval,
1880 const uncertainty_t *uncertainty)
808f4dfe 1881{
c710051a 1882 reachable_regions reachable_regs (this);
808f4dfe
DM
1883
1884 /* Add globals and regions that already escaped in previous
1885 unknown calls. */
1886 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
1887 &reachable_regs);
1888
1889 if (extra_sval)
1890 reachable_regs.handle_sval (extra_sval);
ef7827b0 1891
3a66c289
DM
1892 if (uncertainty)
1893 for (uncertainty_t::iterator iter
1894 = uncertainty->begin_maybe_bound_svals ();
1895 iter != uncertainty->end_maybe_bound_svals (); ++iter)
1896 reachable_regs.handle_sval (*iter);
1897
808f4dfe
DM
1898 /* Get regions for locals that have explicitly bound values. */
1899 for (store::cluster_map_t::iterator iter = m_store.begin ();
1900 iter != m_store.end (); ++iter)
1901 {
1902 const region *base_reg = (*iter).first;
1903 if (const region *parent = base_reg->get_parent_region ())
1904 if (parent->get_kind () == RK_FRAME)
1905 reachable_regs.add (base_reg, false);
1906 }
1907
1908 /* Populate *OUT based on the values that were reachable. */
1909 for (svalue_set::iterator iter
1910 = reachable_regs.begin_reachable_svals ();
1911 iter != reachable_regs.end_reachable_svals (); ++iter)
1912 out->add (*iter);
757bf1df
DM
1913}
1914
1915/* Update this model for the RETURN_STMT, using CTXT to report any
1916 diagnostics. */
1917
1918void
1919region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
1920{
1921 tree callee = get_current_function ()->decl;
1922 tree lhs = DECL_RESULT (callee);
1923 tree rhs = gimple_return_retval (return_stmt);
1924
1925 if (lhs && rhs)
13ad6d9f
DM
1926 {
1927 const svalue *sval = get_rvalue (rhs, ctxt);
1928 const region *ret_reg = get_lvalue (lhs, ctxt);
1929 set_value (ret_reg, sval, ctxt);
1930 }
757bf1df
DM
1931}
1932
342e14ff
DM
1933/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
1934 ENODE, using CTXT to report any diagnostics.
757bf1df 1935
342e14ff
DM
1936 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
1937 0), as opposed to any second return due to longjmp/siglongjmp. */
757bf1df
DM
1938
1939void
1940region_model::on_setjmp (const gcall *call, const exploded_node *enode,
1941 region_model_context *ctxt)
1942{
808f4dfe
DM
1943 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
1944 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
1945 ctxt);
757bf1df 1946
808f4dfe
DM
1947 /* Create a setjmp_svalue for this call and store it in BUF_REG's
1948 region. */
1949 if (buf_reg)
757bf1df 1950 {
fd9982bb 1951 setjmp_record r (enode, call);
808f4dfe
DM
1952 const svalue *sval
1953 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
1954 set_value (buf_reg, sval, ctxt);
757bf1df
DM
1955 }
1956
1957 /* Direct calls to setjmp return 0. */
1958 if (tree lhs = gimple_call_lhs (call))
1959 {
1aff29d4
DM
1960 const svalue *new_sval
1961 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
808f4dfe
DM
1962 const region *lhs_reg = get_lvalue (lhs, ctxt);
1963 set_value (lhs_reg, new_sval, ctxt);
757bf1df
DM
1964 }
1965}
1966
1967/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
1968 to a "setjmp" at SETJMP_CALL where the final stack depth should be
808f4dfe
DM
1969 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
1970 done, and should be done by the caller. */
757bf1df
DM
1971
1972void
1973region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
808f4dfe 1974 int setjmp_stack_depth, region_model_context *ctxt)
757bf1df
DM
1975{
1976 /* Evaluate the val, using the frame of the "longjmp". */
1977 tree fake_retval = gimple_call_arg (longjmp_call, 1);
808f4dfe 1978 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
757bf1df
DM
1979
1980 /* Pop any frames until we reach the stack depth of the function where
1981 setjmp was called. */
1982 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
1983 while (get_stack_depth () > setjmp_stack_depth)
430d7d88 1984 pop_frame (NULL, NULL, ctxt, false);
757bf1df
DM
1985
1986 gcc_assert (get_stack_depth () == setjmp_stack_depth);
1987
1988 /* Assign to LHS of "setjmp" in new_state. */
1989 if (tree lhs = gimple_call_lhs (setjmp_call))
1990 {
1991 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1aff29d4
DM
1992 const svalue *zero_sval
1993 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
808f4dfe 1994 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
757bf1df
DM
1995 /* If we have 0, use 1. */
1996 if (eq_zero.is_true ())
1997 {
808f4dfe 1998 const svalue *one_sval
1aff29d4 1999 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
808f4dfe 2000 fake_retval_sval = one_sval;
757bf1df
DM
2001 }
2002 else
2003 {
2004 /* Otherwise note that the value is nonzero. */
808f4dfe 2005 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
757bf1df
DM
2006 }
2007
808f4dfe
DM
2008 /* Decorate the return value from setjmp as being unmergeable,
2009 so that we don't attempt to merge states with it as zero
2010 with states in which it's nonzero, leading to a clean distinction
2011 in the exploded_graph between the first return and the second
2012 return. */
2013 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
757bf1df 2014
808f4dfe
DM
2015 const region *lhs_reg = get_lvalue (lhs, ctxt);
2016 set_value (lhs_reg, fake_retval_sval, ctxt);
2017 }
757bf1df
DM
2018}
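
/* For example (an illustrative sketch):

     #include <setjmp.h>

     static jmp_buf env;
     static void inner (void) { longjmp (env, 0); }

     void test (void)
     {
       if (setjmp (env) == 0)
         inner ();
     }

   on_setjmp binds a setjmp_svalue into "env" and gives the direct
   call the value 0; on_longjmp pops inner's frame and assigns the
   nonzero (and unmergeable) value to the lhs of the setjmp call,
   with longjmp's val of 0 becoming 1, as per the C standard.  */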
2019
2020/* Update this region_model for a phi stmt of the form
2021 LHS = PHI <...RHS...>.
e0a7a675
DM
2022 where RHS is for the appropriate edge.
2023 Get state from OLD_STATE so that all of the phi stmts for a basic block
2024 are effectively handled simultaneously. */
757bf1df
DM
2025
2026void
8525d1f5 2027region_model::handle_phi (const gphi *phi,
808f4dfe 2028 tree lhs, tree rhs,
e0a7a675 2029 const region_model &old_state,
757bf1df
DM
2030 region_model_context *ctxt)
2031{
2032 /* For now, don't bother tracking the .MEM SSA names. */
2033 if (tree var = SSA_NAME_VAR (lhs))
2034 if (TREE_CODE (var) == VAR_DECL)
2035 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2036 return;
2037
e0a7a675
DM
2038 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2039 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
757bf1df 2040
e0a7a675 2041 set_value (dst_reg, src_sval, ctxt);
8525d1f5
DM
2042
2043 if (ctxt)
2044 ctxt->on_phi (phi, rhs);
757bf1df
DM
2045}
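
/* Reading from OLD_STATE matters when one phi's argument is another
   phi's result in the same block, e.g. in a loop that swaps a pair
   of values (a sketch):

     i_4 = PHI <0(2), j_5(3)>
     j_5 = PHI <1(2), i_4(3)>

   On the back edge both right-hand sides must be evaluated against
   the state on entry to the block, not partially-updated state.  */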
2046
2047/* Implementation of region_model::get_lvalue; the latter adds type-checking.
2048
2049 Get the region for PV within this region_model,
2050 emitting any diagnostics to CTXT. */
2051
808f4dfe 2052const region *
53cb324c 2053region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2054{
2055 tree expr = pv.m_tree;
2056
2057 gcc_assert (expr);
2058
2059 switch (TREE_CODE (expr))
2060 {
2061 default:
808f4dfe
DM
2062 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2063 dump_location_t ());
757bf1df
DM
2064
2065 case ARRAY_REF:
2066 {
2067 tree array = TREE_OPERAND (expr, 0);
2068 tree index = TREE_OPERAND (expr, 1);
757bf1df 2069
808f4dfe
DM
2070 const region *array_reg = get_lvalue (array, ctxt);
2071 const svalue *index_sval = get_rvalue (index, ctxt);
2072 return m_mgr->get_element_region (array_reg,
2073 TREE_TYPE (TREE_TYPE (array)),
2074 index_sval);
757bf1df
DM
2075 }
2076 break;
2077
93e759fc
DM
2078 case BIT_FIELD_REF:
2079 {
2080 tree inner_expr = TREE_OPERAND (expr, 0);
2081 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2082 tree num_bits = TREE_OPERAND (expr, 1);
2083 tree first_bit_offset = TREE_OPERAND (expr, 2);
2084 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2085 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2086 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2087 TREE_INT_CST_LOW (num_bits));
2088 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2089 }
2090 break;
2091
757bf1df
DM
2092 case MEM_REF:
2093 {
2094 tree ptr = TREE_OPERAND (expr, 0);
2095 tree offset = TREE_OPERAND (expr, 1);
808f4dfe
DM
2096 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2097 const svalue *offset_sval = get_rvalue (offset, ctxt);
2098 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2099 return m_mgr->get_offset_region (star_ptr,
2100 TREE_TYPE (expr),
2101 offset_sval);
757bf1df
DM
2102 }
2103 break;
2104
808f4dfe
DM
2105 case FUNCTION_DECL:
2106 return m_mgr->get_region_for_fndecl (expr);
2107
2108 case LABEL_DECL:
2109 return m_mgr->get_region_for_label (expr);
2110
757bf1df
DM
2111 case VAR_DECL:
2112 /* Handle globals. */
2113 if (is_global_var (expr))
808f4dfe 2114 return m_mgr->get_region_for_global (expr);
757bf1df
DM
2115
2116 /* Fall through. */
2117
2118 case SSA_NAME:
2119 case PARM_DECL:
2120 case RESULT_DECL:
2121 {
2122 gcc_assert (TREE_CODE (expr) == SSA_NAME
2123 || TREE_CODE (expr) == PARM_DECL
778aca1b 2124 || VAR_P (expr)
757bf1df
DM
2125 || TREE_CODE (expr) == RESULT_DECL);
2126
808f4dfe
DM
2127 int stack_index = pv.m_stack_depth;
2128 const frame_region *frame = get_frame_at_index (stack_index);
757bf1df 2129 gcc_assert (frame);
4cebae09 2130 return frame->get_region_for_local (m_mgr, expr, ctxt);
757bf1df
DM
2131 }
2132
2133 case COMPONENT_REF:
2134 {
2135 /* obj.field */
2136 tree obj = TREE_OPERAND (expr, 0);
2137 tree field = TREE_OPERAND (expr, 1);
808f4dfe
DM
2138 const region *obj_reg = get_lvalue (obj, ctxt);
2139 return m_mgr->get_field_region (obj_reg, field);
41a9e940
DM
2140 }
2141 break;
2142
757bf1df 2143 case STRING_CST:
808f4dfe 2144 return m_mgr->get_region_for_string (expr);
757bf1df
DM
2145 }
2146}
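
/* For example (a sketch): for the C lvalue "a[i].f", the
   COMPONENT_REF case above recurses on the ARRAY_REF, building a
   field_region for "f" within an element_region (indexed by the
   svalue for "i") within the decl_region for "a".  */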
2147
2148/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2149
09bea584
DM
2150static void
2151assert_compat_types (tree src_type, tree dst_type)
2152{
2153 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
2154 {
2155#if CHECKING_P
2156 if (!(useless_type_conversion_p (src_type, dst_type)))
2157 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2158#endif
2159 }
09bea584 2160}
757bf1df 2161
ea4e3218
DM
2162/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2163
e66b9f67 2164bool
ea4e3218
DM
2165compat_types_p (tree src_type, tree dst_type)
2166{
2167 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2168 if (!(useless_type_conversion_p (src_type, dst_type)))
2169 return false;
2170 return true;
2171}
2172
808f4dfe 2173/* Get the region for PV within this region_model,
757bf1df
DM
2174 emitting any diagnostics to CTXT. */
2175
808f4dfe 2176const region *
53cb324c 2177region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2178{
2179 if (pv.m_tree == NULL_TREE)
808f4dfe 2180 return NULL;
757bf1df 2181
808f4dfe
DM
2182 const region *result_reg = get_lvalue_1 (pv, ctxt);
2183 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2184 return result_reg;
757bf1df
DM
2185}
2186
808f4dfe 2187/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
2188 recent stack frame if it's a local). */
2189
808f4dfe 2190const region *
53cb324c 2191region_model::get_lvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
2192{
2193 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2194}
2195
2196/* Implementation of region_model::get_rvalue; the latter adds type-checking.
2197
2198 Get the value of PV within this region_model,
2199 emitting any diagnostics to CTXT. */
2200
808f4dfe 2201const svalue *
53cb324c 2202region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2203{
2204 gcc_assert (pv.m_tree);
2205
2206 switch (TREE_CODE (pv.m_tree))
2207 {
2208 default:
2242b975 2209 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
757bf1df
DM
2210
2211 case ADDR_EXPR:
2212 {
2213 /* "&EXPR". */
2214 tree expr = pv.m_tree;
2215 tree op0 = TREE_OPERAND (expr, 0);
808f4dfe
DM
2216 const region *expr_reg = get_lvalue (op0, ctxt);
2217 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
757bf1df
DM
2218 }
2219 break;
2220
808f4dfe 2221 case BIT_FIELD_REF:
d3b1ef7a
DM
2222 {
2223 tree expr = pv.m_tree;
2224 tree op0 = TREE_OPERAND (expr, 0);
2225 const region *reg = get_lvalue (op0, ctxt);
2226 tree num_bits = TREE_OPERAND (expr, 1);
2227 tree first_bit_offset = TREE_OPERAND (expr, 2);
2228 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2229 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2230 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2231 TREE_INT_CST_LOW (num_bits));
9faf8348 2232 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
d3b1ef7a 2233 }
808f4dfe 2234
808f4dfe 2235 case VAR_DECL:
20bd258d
DM
2236 if (DECL_HARD_REGISTER (pv.m_tree))
2237 {
2238 /* If it has a hard register, it doesn't have a memory region
2239 and can't be referred to as an lvalue. */
2240 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
2241 }
2242 /* Fall through. */
808f4dfe 2243 case PARM_DECL:
20bd258d 2244 case SSA_NAME:
808f4dfe 2245 case RESULT_DECL:
757bf1df
DM
2246 case ARRAY_REF:
2247 {
da7c2773 2248 const region *reg = get_lvalue (pv, ctxt);
9faf8348 2249 return get_store_value (reg, ctxt);
757bf1df
DM
2250 }
2251
808f4dfe
DM
2252 case REALPART_EXPR:
2253 case IMAGPART_EXPR:
2254 case VIEW_CONVERT_EXPR:
2255 {
2256 tree expr = pv.m_tree;
2257 tree arg = TREE_OPERAND (expr, 0);
2258 const svalue *arg_sval = get_rvalue (arg, ctxt);
2259 const svalue *sval_unaryop
2260 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
2261 arg_sval);
2262 return sval_unaryop;
2263 };
2264
757bf1df
DM
2265 case INTEGER_CST:
2266 case REAL_CST:
808f4dfe
DM
2267 case COMPLEX_CST:
2268 case VECTOR_CST:
757bf1df 2269 case STRING_CST:
808f4dfe
DM
2270 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
2271
2272 case POINTER_PLUS_EXPR:
2273 {
2274 tree expr = pv.m_tree;
2275 tree ptr = TREE_OPERAND (expr, 0);
2276 tree offset = TREE_OPERAND (expr, 1);
2277 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2278 const svalue *offset_sval = get_rvalue (offset, ctxt);
2279 const svalue *sval_binop
2280 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
2281 ptr_sval, offset_sval);
2282 return sval_binop;
2283 }
2284
2285 /* Binary ops. */
2286 case PLUS_EXPR:
2287 case MULT_EXPR:
4d3b7be2
DM
2288 case BIT_AND_EXPR:
2289 case BIT_IOR_EXPR:
2290 case BIT_XOR_EXPR:
808f4dfe
DM
2291 {
2292 tree expr = pv.m_tree;
2293 tree arg0 = TREE_OPERAND (expr, 0);
2294 tree arg1 = TREE_OPERAND (expr, 1);
2295 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
2296 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
2297 const svalue *sval_binop
2298 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
2299 arg0_sval, arg1_sval);
2300 return sval_binop;
2301 }
757bf1df
DM
2302
2303 case COMPONENT_REF:
2304 case MEM_REF:
757bf1df 2305 {
808f4dfe 2306 const region *ref_reg = get_lvalue (pv, ctxt);
9faf8348 2307 return get_store_value (ref_reg, ctxt);
757bf1df 2308 }
1b342485
AS
2309 case OBJ_TYPE_REF:
2310 {
2311 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
2312 return get_rvalue (expr, ctxt);
2313 }
757bf1df
DM
2314 }
2315}
2316
2317/* Get the value of PV within this region_model,
2318 emitting any diagnostics to CTXT. */
2319
808f4dfe 2320const svalue *
53cb324c 2321region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2322{
2323 if (pv.m_tree == NULL_TREE)
808f4dfe 2324 return NULL;
757bf1df 2325
808f4dfe 2326 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 2327
808f4dfe
DM
2328 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
2329
2fdc8546 2330 result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);
33255ad3 2331
808f4dfe 2332 return result_sval;
757bf1df
DM
2333}
2334
2335/* Get the value of EXPR within this region_model (assuming the most
2336 recent stack frame if it's a local). */
2337
808f4dfe 2338const svalue *
53cb324c 2339region_model::get_rvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
2340{
2341 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2342}
2343
623bc027
DM
2344/* Return true if this model is on a path with "main" as the entrypoint
2345 (as opposed to one in which we're merely analyzing a subset of the
2346 path through the code). */
2347
2348bool
2349region_model::called_from_main_p () const
2350{
2351 if (!m_current_frame)
2352 return false;
2353 /* Determine if the oldest stack frame in this model is for "main". */
2354 const frame_region *frame0 = get_frame_at_index (0);
2355 gcc_assert (frame0);
2356 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
2357}
2358
2359/* Subroutine of region_model::get_store_value for when REG is (or is within)
2360 a global variable that hasn't been touched since the start of this path
2361 (or was implicitly touched due to a call to an unknown function). */
2362
2363const svalue *
2364region_model::get_initial_value_for_global (const region *reg) const
2365{
2366 /* Get the decl that REG is for (or is within). */
2367 const decl_region *base_reg
2368 = reg->get_base_region ()->dyn_cast_decl_region ();
2369 gcc_assert (base_reg);
2370 tree decl = base_reg->get_decl ();
2371
2372 /* Special-case: to avoid having to explicitly update all previously
2373 untracked globals when calling an unknown fn, they implicitly have
2374 an unknown value if an unknown call has occurred, unless this is
2375 static to-this-TU and hasn't escaped. Globals that have escaped
2376 are explicitly tracked, so we shouldn't hit this case for them. */
af66094d
DM
2377 if (m_store.called_unknown_fn_p ()
2378 && TREE_PUBLIC (decl)
2379 && !TREE_READONLY (decl))
623bc027
DM
2380 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2381
2382 /* If we are on a path from the entrypoint from "main" and we have a
2383 global decl defined in this TU that hasn't been touched yet, then
2384 the initial value of REG can be taken from the initialization value
2385 of the decl. */
16ad9ae8 2386 if (called_from_main_p () || TREE_READONLY (decl))
fe9771b5 2387 return reg->get_initial_value_at_main (m_mgr);
623bc027
DM
2388
2389 /* Otherwise, return INIT_VAL(REG). */
2390 return m_mgr->get_or_create_initial_value (reg);
2391}
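
/* For example (a sketch):

     static int counter;
     int get_counter (void) { return counter; }

   "counter" is not TREE_PUBLIC, so the unknown-call special case
   does not apply; if the path starts at "main", reading it yields
   its implicit initializer (zero), and otherwise INIT_VAL(counter).
   A non-const public global would instead be treated as unknown
   once any unknown function has been called.  */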
2392
808f4dfe 2393/* Get a value for REG, looking it up in the store, or otherwise falling
9faf8348
DM
2394 back to "initial" or "unknown" values.
2395 Use CTXT to report any warnings associated with reading from REG. */
757bf1df 2396
808f4dfe 2397const svalue *
9faf8348
DM
2398region_model::get_store_value (const region *reg,
2399 region_model_context *ctxt) const
757bf1df 2400{
dfe2ef7f
DM
2401 /* Getting the value of an empty region gives an unknown_svalue. */
2402 if (reg->empty_p ())
2403 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
2404
1eb90f46 2405 bool check_poisoned = true;
9589a46d 2406 if (check_region_for_read (reg, ctxt))
1eb90f46 2407 check_poisoned = false;
9faf8348 2408
2867118d
DM
2409 /* Special-case: handle var_decls in the constant pool. */
2410 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
2411 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
2412 return sval;
2413
808f4dfe
DM
2414 const svalue *sval
2415 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
2416 if (sval)
757bf1df 2417 {
808f4dfe
DM
2418 if (reg->get_type ())
2419 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
2420 return sval;
757bf1df 2421 }
757bf1df 2422
808f4dfe
DM
2423 /* Special-case: read at a constant index within a STRING_CST. */
2424 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
2425 if (tree byte_offset_cst
2426 = offset_reg->get_byte_offset ()->maybe_get_constant ())
2427 if (const string_region *str_reg
2428 = reg->get_parent_region ()->dyn_cast_string_region ())
757bf1df 2429 {
808f4dfe
DM
2430 tree string_cst = str_reg->get_string_cst ();
2431 if (const svalue *char_sval
2432 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2433 byte_offset_cst))
2434 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
757bf1df 2435 }
757bf1df 2436
808f4dfe
DM
2437 /* Special-case: read the initial char of a STRING_CST. */
2438 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
2439 if (const string_region *str_reg
2440 = cast_reg->get_original_region ()->dyn_cast_string_region ())
2441 {
2442 tree string_cst = str_reg->get_string_cst ();
2443 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
2444 if (const svalue *char_sval
2445 = m_mgr->maybe_get_char_from_string_cst (string_cst,
2446 byte_offset_cst))
2447 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
2448 }
757bf1df 2449
808f4dfe
DM
2450 /* Otherwise we implicitly have the initial value of the region
2451 (if the cluster had been touched, binding_cluster::get_any_binding
2452 would have returned UNKNOWN, and we would already have returned
2453 that above). */
757bf1df 2454
623bc027
DM
2455 /* Handle globals. */
2456 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
2457 == RK_GLOBALS)
2458 return get_initial_value_for_global (reg);
757bf1df 2459
1eb90f46 2460 return m_mgr->get_or_create_initial_value (reg, check_poisoned);
757bf1df
DM
2461}
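
/* For example (a sketch): given

     const char *s = "ab";

   reading "s[1]" is a read at constant byte offset 1 within the
   string_region for the STRING_CST, so the special case above
   yields the constant 'b' via maybe_get_char_from_string_cst,
   rather than a symbolic initial value.  */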
2462
808f4dfe
DM
2463/* Return false if REG does not exist, true if it may do.
2464 This is for detecting regions within the stack that don't exist anymore
2465 after frames are popped. */
757bf1df 2466
808f4dfe
DM
2467bool
2468region_model::region_exists_p (const region *reg) const
757bf1df 2469{
808f4dfe
DM
2470 /* If within a stack frame, check that the stack frame is live. */
2471 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 2472 {
808f4dfe
DM
2473 /* Check that the current frame is the enclosing frame, or is called
2474 by it. */
2475 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
2476 iter_frame = iter_frame->get_calling_frame ())
2477 if (iter_frame == enclosing_frame)
2478 return true;
2479 return false;
757bf1df 2480 }
808f4dfe
DM
2481
2482 return true;
757bf1df
DM
2483}
2484
808f4dfe
DM
2485/* Get a region for referencing PTR_SVAL, creating a region if need be, and
2486 potentially generating warnings via CTXT.
35e3f082 2487 PTR_SVAL must be of pointer type.
808f4dfe 2488 PTR_TREE if non-NULL can be used when emitting diagnostics. */
757bf1df 2489
808f4dfe
DM
2490const region *
2491region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
021077b9
DM
2492 region_model_context *ctxt,
2493 bool add_nonnull_constraint) const
757bf1df 2494{
808f4dfe 2495 gcc_assert (ptr_sval);
35e3f082 2496 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
757bf1df 2497
49bfbf18
DM
2498 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
2499 as a constraint. This suppresses false positives from
2500 -Wanalyzer-null-dereference for the case where we later have an
2501 if (PTR_SVAL) that would occur if we considered the false branch
2502 and transitioned the malloc state machine from start->null. */
021077b9
DM
2503 if (add_nonnull_constraint)
2504 {
2505 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
2506 const svalue *null_ptr
2507 = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
2508 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
2509 }
49bfbf18 2510
808f4dfe 2511 switch (ptr_sval->get_kind ())
757bf1df 2512 {
808f4dfe 2513 default:
23ebfda0 2514 break;
808f4dfe 2515
757bf1df
DM
2516 case SK_REGION:
2517 {
808f4dfe
DM
2518 const region_svalue *region_sval
2519 = as_a <const region_svalue *> (ptr_sval);
757bf1df
DM
2520 return region_sval->get_pointee ();
2521 }
2522
808f4dfe
DM
2523 case SK_BINOP:
2524 {
2525 const binop_svalue *binop_sval
2526 = as_a <const binop_svalue *> (ptr_sval);
2527 switch (binop_sval->get_op ())
2528 {
2529 case POINTER_PLUS_EXPR:
2530 {
2531 /* If we have a symbolic value expressing pointer arithmetic,
2532 try to convert it to a suitable region. */
2533 const region *parent_region
2534 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
2535 const svalue *offset = binop_sval->get_arg1 ();
2536 tree type = TREE_TYPE (ptr_sval->get_type ());
2537 return m_mgr->get_offset_region (parent_region, type, offset);
2538 }
2539 default:
23ebfda0 2540 break;
808f4dfe
DM
2541 }
2542 }
23ebfda0 2543 break;
757bf1df
DM
2544
2545 case SK_POISONED:
2546 {
2547 if (ctxt)
808f4dfe
DM
2548 {
2549 tree ptr = get_representative_tree (ptr_sval);
2550 /* If we can't get a representative tree for PTR_SVAL
2551 (e.g. if it hasn't been bound into the store), then
2552 fall back on PTR_TREE, if non-NULL. */
2553 if (!ptr)
2554 ptr = ptr_tree;
2555 if (ptr)
2556 {
2557 const poisoned_svalue *poisoned_sval
2558 = as_a <const poisoned_svalue *> (ptr_sval);
2559 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
0e466e97
DM
2560 ctxt->warn (::make_unique<poisoned_value_diagnostic>
2561 (ptr, pkind, nullptr, nullptr));
808f4dfe
DM
2562 }
2563 }
757bf1df 2564 }
23ebfda0 2565 break;
757bf1df
DM
2566 }
2567
23ebfda0 2568 return m_mgr->get_symbolic_region (ptr_sval);
757bf1df
DM
2569}
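
/* For example (a sketch): dereferencing the svalue for "p + i"
   (a binop_svalue with POINTER_PLUS_EXPR) yields an offset_region
   within the region that "p" points to, whereas dereferencing an
   otherwise-unknown pointer falls through to a symbolic_region
   wrapping the pointer's svalue.  */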
2570
d3b1ef7a
DM
2571/* Attempt to get BITS within any value of REG, as TYPE.
2572 In particular, extract values from compound_svalues for the case
2573 where there's a concrete binding at BITS.
9faf8348
DM
2574 Return an unknown svalue if we can't handle the given case.
2575 Use CTXT to report any warnings associated with reading from REG. */
d3b1ef7a
DM
2576
2577const svalue *
2578region_model::get_rvalue_for_bits (tree type,
2579 const region *reg,
9faf8348
DM
2580 const bit_range &bits,
2581 region_model_context *ctxt) const
d3b1ef7a 2582{
9faf8348 2583 const svalue *sval = get_store_value (reg, ctxt);
e61ffa20 2584 return m_mgr->get_or_create_bits_within (type, bits, sval);
d3b1ef7a
DM
2585}
2586
3175d40f
DM
2587/* A subclass of pending_diagnostic for complaining about writes to
2588 constant regions of memory. */
2589
2590class write_to_const_diagnostic
2591: public pending_diagnostic_subclass<write_to_const_diagnostic>
2592{
2593public:
2594 write_to_const_diagnostic (const region *reg, tree decl)
2595 : m_reg (reg), m_decl (decl)
2596 {}
2597
ff171cb1 2598 const char *get_kind () const final override
3175d40f
DM
2599 {
2600 return "write_to_const_diagnostic";
2601 }
2602
2603 bool operator== (const write_to_const_diagnostic &other) const
2604 {
2605 return (m_reg == other.m_reg
2606 && m_decl == other.m_decl);
2607 }
2608
ff171cb1 2609 int get_controlling_option () const final override
7fd6e36e
DM
2610 {
2611 return OPT_Wanalyzer_write_to_const;
2612 }
2613
0e466e97 2614 bool emit (rich_location *rich_loc, logger *) final override
3175d40f 2615 {
111fd515
DM
2616 auto_diagnostic_group d;
2617 bool warned;
2618 switch (m_reg->get_kind ())
2619 {
2620 default:
7fd6e36e 2621 warned = warning_at (rich_loc, get_controlling_option (),
111fd515
DM
2622 "write to %<const%> object %qE", m_decl);
2623 break;
2624 case RK_FUNCTION:
7fd6e36e 2625 warned = warning_at (rich_loc, get_controlling_option (),
111fd515
DM
2626 "write to function %qE", m_decl);
2627 break;
2628 case RK_LABEL:
7fd6e36e 2629 warned = warning_at (rich_loc, get_controlling_option (),
111fd515
DM
2630 "write to label %qE", m_decl);
2631 break;
2632 }
3175d40f
DM
2633 if (warned)
2634 inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
2635 return warned;
2636 }
2637
ff171cb1 2638 label_text describe_final_event (const evdesc::final_event &ev) final override
3175d40f 2639 {
111fd515
DM
2640 switch (m_reg->get_kind ())
2641 {
2642 default:
2643 return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
2644 case RK_FUNCTION:
2645 return ev.formatted_print ("write to function %qE here", m_decl);
2646 case RK_LABEL:
2647 return ev.formatted_print ("write to label %qE here", m_decl);
2648 }
3175d40f
DM
2649 }
2650
2651private:
2652 const region *m_reg;
2653 tree m_decl;
2654};
2655
2656/* A subclass of pending_diagnostic for complaining about writes to
2657 string literals. */
2658
2659class write_to_string_literal_diagnostic
2660: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
2661{
2662public:
2663 write_to_string_literal_diagnostic (const region *reg)
2664 : m_reg (reg)
2665 {}
2666
ff171cb1 2667 const char *get_kind () const final override
3175d40f
DM
2668 {
2669 return "write_to_string_literal_diagnostic";
2670 }
2671
2672 bool operator== (const write_to_string_literal_diagnostic &other) const
2673 {
2674 return m_reg == other.m_reg;
2675 }
2676
ff171cb1 2677 int get_controlling_option () const final override
7fd6e36e
DM
2678 {
2679 return OPT_Wanalyzer_write_to_string_literal;
2680 }
2681
0e466e97 2682 bool emit (rich_location *rich_loc, logger *) final override
3175d40f 2683 {
7fd6e36e 2684 return warning_at (rich_loc, get_controlling_option (),
3175d40f
DM
2685 "write to string literal");
2686 /* Ideally we would show the location of the STRING_CST as well,
2687 but it is not available at this point. */
2688 }
2689
ff171cb1 2690 label_text describe_final_event (const evdesc::final_event &ev) final override
3175d40f
DM
2691 {
2692 return ev.formatted_print ("write to string literal here");
2693 }
2694
2695private:
2696 const region *m_reg;
2697};
2698
2699/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */
2700
2701void
2702region_model::check_for_writable_region (const region* dest_reg,
2703 region_model_context *ctxt) const
2704{
2705 /* Fail gracefully if CTXT is NULL. */
2706 if (!ctxt)
2707 return;
2708
2709 const region *base_reg = dest_reg->get_base_region ();
2710 switch (base_reg->get_kind ())
2711 {
2712 default:
2713 break;
111fd515
DM
2714 case RK_FUNCTION:
2715 {
2716 const function_region *func_reg = as_a <const function_region *> (base_reg);
2717 tree fndecl = func_reg->get_fndecl ();
6341f14e
DM
2718 ctxt->warn (make_unique<write_to_const_diagnostic>
2719 (func_reg, fndecl));
111fd515
DM
2720 }
2721 break;
2722 case RK_LABEL:
2723 {
2724 const label_region *label_reg = as_a <const label_region *> (base_reg);
2725 tree label = label_reg->get_label ();
6341f14e
DM
2726 ctxt->warn (make_unique<write_to_const_diagnostic>
2727 (label_reg, label));
111fd515
DM
2728 }
2729 break;
3175d40f
DM
2730 case RK_DECL:
2731 {
2732 const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
2733 tree decl = decl_reg->get_decl ();
2734 /* Warn about writes to const globals.
2735 Don't warn for writes to const locals, and params in particular,
2736 since we would warn in push_frame when setting them up (e.g. the
2737 "this" param is "T* const"). */
2738 if (TREE_READONLY (decl)
2739 && is_global_var (decl))
6341f14e 2740 ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
3175d40f
DM
2741 }
2742 break;
2743 case RK_STRING:
6341f14e 2744 ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
3175d40f
DM
2745 break;
2746 }
2747}
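
/* Examples (sketches) of writes caught here:

     const int g = 1;
     void test1 (void) { *(int *)&g = 2; }            // to 'const'

     void test2 (void) { ((char *)"str")[0] = 'S'; }  // to literal

   flagged via -Wanalyzer-write-to-const and
   -Wanalyzer-write-to-string-literal respectively.  */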
2748
9a2c9579
DM
2749/* Get the capacity of REG in bytes. */
2750
2751const svalue *
2752region_model::get_capacity (const region *reg) const
2753{
2754 switch (reg->get_kind ())
2755 {
2756 default:
2757 break;
2758 case RK_DECL:
2759 {
2760 const decl_region *decl_reg = as_a <const decl_region *> (reg);
2761 tree decl = decl_reg->get_decl ();
2762 if (TREE_CODE (decl) == SSA_NAME)
2763 {
2764 tree type = TREE_TYPE (decl);
2765 tree size = TYPE_SIZE (type);
2766 return get_rvalue (size, NULL);
2767 }
2768 else
2769 {
2770 tree size = decl_init_size (decl, false);
2771 if (size)
2772 return get_rvalue (size, NULL);
2773 }
2774 }
2775 break;
e61ffa20
DM
2776 case RK_SIZED:
2777 /* Look through sized regions to get at the capacity
2778 of the underlying regions. */
2779 return get_capacity (reg->get_parent_region ());
0e466e97
DM
2780 case RK_STRING:
2781 {
2782 /* "Capacity" here means "size". */
2783 const string_region *string_reg = as_a <const string_region *> (reg);
2784 tree string_cst = string_reg->get_string_cst ();
2785 return m_mgr->get_or_create_int_cst (size_type_node,
2786 TREE_STRING_LENGTH (string_cst));
2787 }
2788 break;
9a2c9579
DM
2789 }
2790
2791 if (const svalue *recorded = get_dynamic_extents (reg))
2792 return recorded;
2793
2794 return m_mgr->get_or_create_unknown_svalue (sizetype);
2795}
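
/* For example (a sketch): given

     char buf[16];
     void *p = malloc (n);

   the capacity of buf's decl_region is the constant 16 (from
   decl_init_size), whereas the capacity of the heap-allocated
   region is the dynamic extent recorded at the allocation site:
   the svalue for "n".  */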
2796
9faf8348 2797/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
9589a46d 2798 using DIR to determine if this access is a read or write.
1eb90f46 2799 Return TRUE if an OOB access was detected.
0e466e97
DM
2800 If SVAL_HINT is non-NULL, use it as a hint in diagnostics
2801 about the value that would be written to REG. */
9faf8348 2802
9589a46d 2803bool
9faf8348
DM
2804region_model::check_region_access (const region *reg,
2805 enum access_direction dir,
0e466e97 2806 const svalue *sval_hint,
9faf8348
DM
2807 region_model_context *ctxt) const
2808{
2809 /* Fail gracefully if CTXT is NULL. */
2810 if (!ctxt)
9589a46d 2811 return false;
9faf8348 2812
1eb90f46 2813 bool oob_access_detected = false;
b9365b93 2814 check_region_for_taint (reg, dir, ctxt);
0e466e97 2815 if (!check_region_bounds (reg, dir, sval_hint, ctxt))
1eb90f46 2816 oob_access_detected = true;
b9365b93 2817
9faf8348
DM
2818 switch (dir)
2819 {
2820 default:
2821 gcc_unreachable ();
2822 case DIR_READ:
2823 /* Currently a no-op. */
2824 break;
2825 case DIR_WRITE:
2826 check_for_writable_region (reg, ctxt);
2827 break;
2828 }
1eb90f46 2829 return oob_access_detected;
9faf8348
DM
2830}
2831
2832/* If CTXT is non-NULL, use it to warn about any problems writing to REG. */
2833
2834void
2835region_model::check_region_for_write (const region *dest_reg,
0e466e97 2836 const svalue *sval_hint,
9faf8348
DM
2837 region_model_context *ctxt) const
2838{
0e466e97 2839 check_region_access (dest_reg, DIR_WRITE, sval_hint, ctxt);
9faf8348
DM
2840}
2841
9589a46d 2842/* If CTXT is non-NULL, use it to warn about any problems reading from REG.
1eb90f46 2843 Returns TRUE if an OOB read was detected. */
9faf8348 2844
9589a46d 2845bool
9faf8348
DM
2846region_model::check_region_for_read (const region *src_reg,
2847 region_model_context *ctxt) const
2848{
0e466e97 2849 return check_region_access (src_reg, DIR_READ, NULL, ctxt);
9faf8348
DM
2850}
2851
e6c3bb37
TL
2852/* Concrete subclass for complaining about pointer casts where the buffer size is not a multiple of the pointee's size, leaving trailing bytes. */
2853
2854class dubious_allocation_size
2855: public pending_diagnostic_subclass<dubious_allocation_size>
2856{
2857public:
021077b9
DM
2858 dubious_allocation_size (const region *lhs, const region *rhs,
2859 const gimple *stmt)
2860 : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE), m_stmt (stmt),
f5758fe5 2861 m_has_allocation_event (false)
e6c3bb37
TL
2862 {}
2863
2864 dubious_allocation_size (const region *lhs, const region *rhs,
021077b9
DM
2865 tree expr, const gimple *stmt)
2866 : m_lhs (lhs), m_rhs (rhs), m_expr (expr), m_stmt (stmt),
f5758fe5 2867 m_has_allocation_event (false)
e6c3bb37
TL
2868 {}
2869
2870 const char *get_kind () const final override
2871 {
2872 return "dubious_allocation_size";
2873 }
2874
2875 bool operator== (const dubious_allocation_size &other) const
2876 {
021077b9
DM
2877 return (m_stmt == other.m_stmt
2878 && pending_diagnostic::same_tree_p (m_expr, other.m_expr));
e6c3bb37
TL
2879 }
2880
2881 int get_controlling_option () const final override
2882 {
2883 return OPT_Wanalyzer_allocation_size;
2884 }
2885
0e466e97 2886 bool emit (rich_location *rich_loc, logger *) final override
e6c3bb37
TL
2887 {
2888 diagnostic_metadata m;
2889 m.add_cwe (131);
2890
2891 return warning_meta (rich_loc, m, get_controlling_option (),
c83e9731
TL
2892 "allocated buffer size is not a multiple"
2893 " of the pointee's size");
e6c3bb37
TL
2894 }
2895
e6c3bb37
TL
2896 label_text describe_final_event (const evdesc::final_event &ev) final
2897 override
2898 {
2899 tree pointee_type = TREE_TYPE (m_lhs->get_type ());
f5758fe5 2900 if (m_has_allocation_event)
e6c3bb37
TL
2901 return ev.formatted_print ("assigned to %qT here;"
2902 " %<sizeof (%T)%> is %qE",
2903 m_lhs->get_type (), pointee_type,
2904 size_in_bytes (pointee_type));
f5758fe5
DM
2905 /* Fallback: typically we should have seen an allocation_event
2906 before this point. */
e6c3bb37
TL
2907 if (m_expr)
2908 {
2909 if (TREE_CODE (m_expr) == INTEGER_CST)
2910 return ev.formatted_print ("allocated %E bytes and assigned to"
2911 " %qT here; %<sizeof (%T)%> is %qE",
2912 m_expr, m_lhs->get_type (), pointee_type,
2913 size_in_bytes (pointee_type));
2914 else
2915 return ev.formatted_print ("allocated %qE bytes and assigned to"
2916 " %qT here; %<sizeof (%T)%> is %qE",
2917 m_expr, m_lhs->get_type (), pointee_type,
2918 size_in_bytes (pointee_type));
2919 }
2920
2921 return ev.formatted_print ("allocated and assigned to %qT here;"
2922 " %<sizeof (%T)%> is %qE",
2923 m_lhs->get_type (), pointee_type,
2924 size_in_bytes (pointee_type));
2925 }
2926
f5758fe5
DM
2927 void
2928 add_region_creation_events (const region *,
2929 tree capacity,
e24fe128 2930 const event_loc_info &loc_info,
f5758fe5
DM
2931 checker_path &emission_path) final override
2932 {
2933 emission_path.add_event
e24fe128 2934 (make_unique<region_creation_event_allocation_size> (capacity, loc_info));
f5758fe5
DM
2935
2936 m_has_allocation_event = true;
2937 }
2938
e6c3bb37
TL
2939 void mark_interesting_stuff (interesting_t *interest) final override
2940 {
2941 interest->add_region_creation (m_rhs);
2942 }
2943
2944private:
2945 const region *m_lhs;
2946 const region *m_rhs;
2947 const tree m_expr;
021077b9 2948 const gimple *m_stmt;
f5758fe5 2949 bool m_has_allocation_event;
e6c3bb37
TL
2950};
2951
2952/* Return true if constant allocation size CST is compatible with constant POINTEE_SIZE_TREE (i.e. not a dubious allocation size). */
2953
2954static bool
2955capacity_compatible_with_type (tree cst, tree pointee_size_tree,
2956 bool is_struct)
2957{
2958 gcc_assert (TREE_CODE (cst) == INTEGER_CST);
2959 gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);
2960
2961 unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
2962 unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);
2963
2964 if (is_struct)
b4cc945c 2965 return alloc_size == 0 || alloc_size >= pointee_size;
e6c3bb37
TL
2966 return alloc_size % pointee_size == 0;
2967}
2968
2969static bool
2970capacity_compatible_with_type (tree cst, tree pointee_size_tree)
2971{
2972 return capacity_compatible_with_type (cst, pointee_size_tree, false);
2973}
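
/* For example (a sketch): with a pointee size of 4 bytes,
   allocation sizes 0, 4, 8, ... are compatible for non-structs
   (the modulus is zero), whereas for structs any size that is 0 or
   >= 4 is accepted, allowing for over-allocation beyond the struct
   itself.  */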
2974
2975/* Checks whether SVAL could be a multiple of SIZE_CST.
2976
2977 It works by visiting all svalues inside SVAL until it reaches
2978 atomic nodes. From those, it goes back up again and adds each
1d57a223 2979 node that is not a multiple of SIZE_CST to the RESULT_SET. */
e6c3bb37
TL
2980
2981class size_visitor : public visitor
2982{
2983public:
c83e9731
TL
2984 size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
2985 : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
e6c3bb37 2986 {
c83e9731 2987 m_root_sval->accept (this);
e6c3bb37
TL
2988 }
2989
1d57a223 2990 bool is_dubious_capacity ()
e6c3bb37 2991 {
c83e9731 2992 return result_set.contains (m_root_sval);
e6c3bb37
TL
2993 }
2994
2995 void visit_constant_svalue (const constant_svalue *sval) final override
2996 {
c83e9731 2997 check_constant (sval->get_constant (), sval);
e6c3bb37
TL
2998 }
2999
bdd385b2 3000 void visit_unaryop_svalue (const unaryop_svalue *sval) final override
e6c3bb37 3001 {
1d57a223
TL
3002 if (CONVERT_EXPR_CODE_P (sval->get_op ())
3003 && result_set.contains (sval->get_arg ()))
e6c3bb37
TL
3004 result_set.add (sval);
3005 }
3006
3007 void visit_binop_svalue (const binop_svalue *sval) final override
3008 {
3009 const svalue *arg0 = sval->get_arg0 ();
3010 const svalue *arg1 = sval->get_arg1 ();
3011
1d57a223 3012 switch (sval->get_op ())
e6c3bb37 3013 {
1d57a223
TL
3014 case MULT_EXPR:
3015 if (result_set.contains (arg0) && result_set.contains (arg1))
3016 result_set.add (sval);
3017 break;
3018 case PLUS_EXPR:
3019 case MINUS_EXPR:
3020 if (result_set.contains (arg0) || result_set.contains (arg1))
3021 result_set.add (sval);
3022 break;
3023 default:
3024 break;
e6c3bb37
TL
3025 }
3026 }
3027
e6c3bb37
TL
3028 void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
3029 {
e6c3bb37
TL
3030 if (result_set.contains (sval->get_arg ()))
3031 result_set.add (sval);
3032 }
3033
3034 void visit_widening_svalue (const widening_svalue *sval) final override
3035 {
3036 const svalue *base = sval->get_base_svalue ();
3037 const svalue *iter = sval->get_iter_svalue ();
3038
1d57a223 3039 if (result_set.contains (base) || result_set.contains (iter))
e6c3bb37
TL
3040 result_set.add (sval);
3041 }
3042
1d57a223 3043 void visit_initial_svalue (const initial_svalue *sval) final override
e6c3bb37 3044 {
1d57a223 3045 equiv_class_id id = equiv_class_id::null ();
e6c3bb37
TL
3046 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3047 {
c83e9731
TL
3048 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3049 check_constant (cst, sval);
1d57a223
TL
3050 }
3051 else if (!m_cm->sval_constrained_p (sval))
3052 {
3053 result_set.add (sval);
e6c3bb37
TL
3054 }
3055 }
3056
1d57a223 3057 void visit_conjured_svalue (const conjured_svalue *sval) final override
e6c3bb37 3058 {
1d57a223
TL
3059 equiv_class_id id = equiv_class_id::null ();
3060 if (m_cm->get_equiv_class_by_svalue (sval, &id))
3061 if (tree cst = id.get_obj (*m_cm).get_any_constant ())
3062 check_constant (cst, sval);
e6c3bb37
TL
3063 }
3064
3065private:
c83e9731
TL
3066 void check_constant (tree cst, const svalue *sval)
3067 {
3068 switch (TREE_CODE (cst))
3069 {
3070 default:
3071 /* Assume all unhandled operands are compatible. */
c83e9731
TL
3072 break;
3073 case INTEGER_CST:
1d57a223 3074 if (!capacity_compatible_with_type (cst, m_size_cst))
c83e9731
TL
3075 result_set.add (sval);
3076 break;
3077 }
3078 }
3079
e6c3bb37 3080 tree m_size_cst;
c83e9731 3081 const svalue *m_root_sval;
e6c3bb37
TL
3082 constraint_manager *m_cm;
3083 svalue_set result_set; /* Used as a mapping of svalue*->bool. */
3084};
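
/* For example (a sketch): with SIZE_CST 4 and an unconstrained "n",
   the svalue for "n * 4" is not flagged, since the constant
   multiplicand 4 makes the product a multiple; "n * 4 + 2" is
   flagged, since the PLUS adds the non-multiple constant 2, so the
   root svalue lands in RESULT_SET and is_dubious_capacity ()
   returns true.  */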
3085
3086/* Return true if a struct or union either uses the inheritance pattern,
3087 where the first field is a base struct, or the flexible array member
3088 pattern, where the last field is an array without a specified size. */
3089
3090static bool
3091struct_or_union_with_inheritance_p (tree struc)
3092{
3093 tree iter = TYPE_FIELDS (struc);
3094 if (iter == NULL_TREE)
3095 return false;
3096 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
3097 return true;
3098
3099 tree last_field;
3100 while (iter != NULL_TREE)
3101 {
3102 last_field = iter;
3103 iter = DECL_CHAIN (iter);
3104 }
3105
3106 if (last_field != NULL_TREE
3107 && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
3108 return true;
3109
3110 return false;
3111}
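
/* For example (a sketch): both of these return true:

     struct derived { struct base b; int extra; };   // base as first field
     struct str { size_t len; char buf[]; };         // flexible array member
*/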
3112
3113/* Return true if STMT is an assignment or call whose lhs and rhs (or return) types differ. */
3114
3115static bool
3116is_any_cast_p (const gimple *stmt)
3117{
c83e9731 3118 if (const gassign *assign = dyn_cast <const gassign *> (stmt))
e6c3bb37
TL
3119 return gimple_assign_cast_p (assign)
3120 || !pending_diagnostic::same_tree_p (
3121 TREE_TYPE (gimple_assign_lhs (assign)),
3122 TREE_TYPE (gimple_assign_rhs1 (assign)));
c83e9731 3123 else if (const gcall *call = dyn_cast <const gcall *> (stmt))
e6c3bb37
TL
3124 {
3125 tree lhs = gimple_call_lhs (call);
3126 return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
3127 TREE_TYPE (gimple_call_lhs (call)),
3128 gimple_call_return_type (call));
3129 }
3130
3131 return false;
3132}
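
/* E.g. (a sketch): "int *p = (int *) q;" is a cast in this sense,
   as is "long v = fn ();" where fn returns int; "int x = y;" with
   matching types is not.  */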
3133
3134/* On pointer assignments, check whether the buffer size of
3135 RHS_SVAL is compatible with the type of the LHS_REG.
3136 Use a non-null CTXT to report allocation size warnings. */
3137
3138void
3139region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
3140 region_model_context *ctxt) const
3141{
3142 if (!ctxt || ctxt->get_stmt () == NULL)
3143 return;
3144 /* Only report warnings on assignments that actually change the type. */
3145 if (!is_any_cast_p (ctxt->get_stmt ()))
3146 return;
3147
e6c3bb37
TL
3148 tree pointer_type = lhs_reg->get_type ();
3149 if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
3150 return;
3151
3152 tree pointee_type = TREE_TYPE (pointer_type);
3153 /* Make sure that the type on the left-hand side actually has a size. */
3154 if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
3155 || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
3156 return;
3157
3158 /* Bail out early on pointers to structs where we cannot
3159 deduce whether the buffer size is compatible. */
3160 bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
3161 if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
3162 return;
3163
3164 tree pointee_size_tree = size_in_bytes (pointee_type);
3165 /* We give up if the type size is not known at compile-time or the
3166 type size is always compatible regardless of the buffer size. */
3167 if (TREE_CODE (pointee_size_tree) != INTEGER_CST
3168 || integer_zerop (pointee_size_tree)
3169 || integer_onep (pointee_size_tree))
3170 return;
3171
021077b9 3172 const region *rhs_reg = deref_rvalue (rhs_sval, NULL_TREE, ctxt, false);
e6c3bb37
TL
3173 const svalue *capacity = get_capacity (rhs_reg);
3174 switch (capacity->get_kind ())
3175 {
3176 case svalue_kind::SK_CONSTANT:
3177 {
3178 const constant_svalue *cst_cap_sval
c83e9731 3179 = as_a <const constant_svalue *> (capacity);
e6c3bb37 3180 tree cst_cap = cst_cap_sval->get_constant ();
c83e9731
TL
3181 if (TREE_CODE (cst_cap) == INTEGER_CST
3182 && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
3183 is_struct))
6341f14e 3184 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
021077b9
DM
3185 cst_cap,
3186 ctxt->get_stmt ()));
e6c3bb37
TL
3187 }
3188 break;
3189 default:
3190 {
3191 if (!is_struct)
3192 {
3193 size_visitor v (pointee_size_tree, capacity, m_constraints);
1d57a223 3194 if (v.is_dubious_capacity ())
e6c3bb37
TL
3195 {
3196 tree expr = get_representative_tree (capacity);
6341f14e
DM
3197 ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
3198 rhs_reg,
021077b9
DM
3199 expr,
3200 ctxt->get_stmt ()));
e6c3bb37
TL
3201 }
3202 }
3203 break;
3204 }
3205 }
3206}
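
/* For example (a sketch):

     int32_t *p = malloc (10);                    // 10 is not a
                                                  // multiple of 4
     int32_t *q = malloc (n * sizeof (int32_t));  // OK

   The first triggers -Wanalyzer-allocation-size via the constant
   case above; the second is vetted symbolically by size_visitor.  */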
3207
808f4dfe 3208/* Set the value of the region given by LHS_REG to the value given
9faf8348
DM
3209 by RHS_SVAL.
3210 Use CTXT to report any warnings associated with writing to LHS_REG. */
757bf1df 3211
808f4dfe
DM
3212void
3213region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
3175d40f 3214 region_model_context *ctxt)
757bf1df 3215{
808f4dfe
DM
3216 gcc_assert (lhs_reg);
3217 gcc_assert (rhs_sval);
3218
dfe2ef7f
DM
3219 /* Setting the value of an empty region is a no-op. */
3220 if (lhs_reg->empty_p ())
3221 return;
3222
e6c3bb37
TL
3223 check_region_size (lhs_reg, rhs_sval, ctxt);
3224
0e466e97 3225 check_region_for_write (lhs_reg, rhs_sval, ctxt);
3175d40f 3226
808f4dfe 3227 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
e61ffa20 3228 ctxt ? ctxt->get_uncertainty () : NULL);
757bf1df
DM
3229}
3230
808f4dfe 3231/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
3232
3233void
808f4dfe 3234region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 3235{
808f4dfe
DM
3236 const region *lhs_reg = get_lvalue (lhs, ctxt);
3237 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
3238 gcc_assert (lhs_reg);
3239 gcc_assert (rhs_sval);
3240 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
3241}
3242
325f9e88
DM
3243/* Issue a note specifying that a particular function parameter is expected
3244 to be a valid null-terminated string. */
3245
3246static void
3247inform_about_expected_null_terminated_string_arg (const call_arg_details &ad)
3248{
3249 // TODO: ideally we'd underline the param here
3250 inform (DECL_SOURCE_LOCATION (ad.m_called_fndecl),
3251 "argument %d of %qD must be a pointer to a null-terminated string",
3252 ad.m_arg_idx + 1, ad.m_called_fndecl);
3253}
3254
fe97f09a 3255/* A binding of a specific svalue at a concrete byte range. */
325f9e88 3256
fe97f09a 3257struct fragment
325f9e88 3258{
fe97f09a
DM
3259 fragment ()
3260 : m_byte_range (0, 0), m_sval (nullptr)
325f9e88 3261 {
325f9e88
DM
3262 }
3263
fe97f09a
DM
3264 fragment (const byte_range &bytes, const svalue *sval)
3265 : m_byte_range (bytes), m_sval (sval)
325f9e88 3266 {
325f9e88
DM
3267 }
3268
fe97f09a 3269 static int cmp_ptrs (const void *p1, const void *p2)
325f9e88 3270 {
fe97f09a
DM
3271 const fragment *f1 = (const fragment *)p1;
3272 const fragment *f2 = (const fragment *)p2;
3273 return byte_range::cmp (f1->m_byte_range, f2->m_byte_range);
325f9e88
DM
3274 }
3275
fe97f09a
DM
3276 /* Determine if there is a zero terminator somewhere in the
3277 bytes of this fragment, starting at START_READ_OFFSET (which
3278 is absolute to the start of the cluster as a whole), and stopping
3279 at the end of this fragment.
3280
3281 Return a tristate:
3282 - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
3283 the number of bytes that would be read, including the zero byte.
3284 - false if there definitely isn't a zero byte
3285 - unknown if we don't know. */
3286 tristate has_null_terminator (byte_offset_t start_read_offset,
3287 byte_offset_t *out_bytes_read) const
325f9e88 3288 {
fe97f09a
DM
3289 byte_offset_t rel_start_read_offset
3290 = start_read_offset - m_byte_range.get_start_byte_offset ();
3291 gcc_assert (rel_start_read_offset >= 0);
3292 byte_offset_t available_bytes
3293 = (m_byte_range.get_next_byte_offset () - start_read_offset);
3294 gcc_assert (available_bytes >= 0);
3295
3296 if (rel_start_read_offset > INT_MAX)
3297 return tristate::TS_UNKNOWN;
3298 HOST_WIDE_INT rel_start_read_offset_hwi = rel_start_read_offset.slow ();
3299
3300 if (available_bytes > INT_MAX)
3301 return tristate::TS_UNKNOWN;
3302 HOST_WIDE_INT available_bytes_hwi = available_bytes.slow ();
3303
3304 switch (m_sval->get_kind ())
3305 {
3306 case SK_CONSTANT:
3307 {
3308 tree cst
3309 = as_a <const constant_svalue *> (m_sval)->get_constant ();
3310 switch (TREE_CODE (cst))
3311 {
3312 case STRING_CST:
3313 {
3314 /* Look for the first 0 byte within STRING_CST
3315 from START_READ_OFFSET onwards. */
3316 const HOST_WIDE_INT num_bytes_to_search
3317 = std::min<HOST_WIDE_INT> ((TREE_STRING_LENGTH (cst)
3318 - rel_start_read_offset_hwi),
3319 available_bytes_hwi);
3320 const char *start = (TREE_STRING_POINTER (cst)
3321 + rel_start_read_offset_hwi);
3322 if (num_bytes_to_search >= 0)
3323 if (const void *p = memchr (start, 0,
3324 num_bytes_to_search))
3325 {
3326 *out_bytes_read = (const char *)p - start + 1;
3327 return tristate (true);
3328 }
3329
3330 *out_bytes_read = available_bytes;
3331 return tristate (false);
3332 }
3333 break;
3334 case INTEGER_CST:
3335 if (rel_start_read_offset_hwi == 0
3336 && integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (cst))))
3337 {
3338 /* Model accesses to the initial byte of a 1-byte
3339 INTEGER_CST. */
3340 if (zerop (cst))
3341 {
3342 *out_bytes_read = 1;
3343 return tristate (true);
3344 }
3345 else
3346 {
3347 *out_bytes_read = available_bytes;
3348 return tristate (false);
3349 }
3350 }
3351 /* Treat any other access to an INTEGER_CST as unknown. */
3352 return tristate::TS_UNKNOWN;
3353
3354 default:
3355 gcc_unreachable ();
3356 break;
3357 }
3358 }
3359 break;
3360 default:
3361 // TODO: it may be possible to handle other cases here.
3362 return tristate::TS_UNKNOWN;
3363 }
325f9e88
DM
3364 }
3365
fe97f09a
DM
3366 byte_range m_byte_range;
3367 const svalue *m_sval;
3368};
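
/* For example (a sketch): a fragment binding bytes 4-7 to the
   STRING_CST "abc" (4 bytes, including the trailing NUL), queried
   with START_READ_OFFSET 5, searches the bytes "bc\0" and returns
   true with *OUT_BYTES_READ set to 3.  */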
3369
3370/* A frozen copy of a single base region's binding_cluster within a store,
3371 optimized for traversal of the concrete parts in byte order.
3372 This only captures concrete bindings, and is an implementation detail
3373 of region_model::scan_for_null_terminator. */
3374
3375class iterable_cluster
3376{
3377public:
3378 iterable_cluster (const binding_cluster *cluster)
325f9e88 3379 {
fe97f09a
DM
3380 if (!cluster)
3381 return;
3382 for (auto iter : *cluster)
3383 {
3384 const binding_key *key = iter.first;
3385 const svalue *sval = iter.second;
3386
3387 if (const concrete_binding *concrete_key
3388 = key->dyn_cast_concrete_binding ())
3389 {
3390 byte_range fragment_bytes (0, 0);
3391 if (concrete_key->get_byte_range (&fragment_bytes))
3392 m_fragments.safe_push (fragment (fragment_bytes, sval));
3393 }
5ef89c5c
DM
3394 else
3395 m_symbolic_bindings.safe_push (key);
fe97f09a
DM
3396 }
3397 m_fragments.qsort (fragment::cmp_ptrs);
325f9e88
DM
3398 }
3399
fe97f09a
DM
3400 bool
3401 get_fragment_for_byte (byte_offset_t byte, fragment *out_frag) const
325f9e88 3402 {
fe97f09a
DM
3403 /* TODO: binary search rather than linear. */
3404 unsigned iter_idx;
3405 for (iter_idx = 0; iter_idx < m_fragments.length (); iter_idx++)
3406 {
3407 if (m_fragments[iter_idx].m_byte_range.contains_p (byte))
3408 {
3409 *out_frag = m_fragments[iter_idx];
3410 return true;
3411 }
3412 }
3413 return false;
325f9e88
DM
3414 }
3415
5ef89c5c
DM
3416 bool has_symbolic_bindings_p () const
3417 {
3418 return !m_symbolic_bindings.is_empty ();
3419 }
3420
325f9e88 3421private:
fe97f09a 3422 auto_vec<fragment> m_fragments;
5ef89c5c 3423 auto_vec<const binding_key *> m_symbolic_bindings;
325f9e88
DM
3424};
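The TODO in get_fragment_for_byte asks for a binary search; since the fragments were qsorted by start offset and concrete bindings are disjoint, std::upper_bound suffices. A standalone sketch under those assumptions (byte_span is a hypothetical stand-in for byte_range):

#include <algorithm>
#include <vector>

struct byte_span { long start; long size; };  /* sorted by start, disjoint */

/* Binary-search SPANS for the span containing BYTE; on success write it
   to *OUT and return true.  */
static bool
find_span_for_byte (const std::vector<byte_span> &spans, long byte,
                    byte_span *out)
{
  /* Locate the first span starting strictly after BYTE...  */
  auto it = std::upper_bound (spans.begin (), spans.end (), byte,
                              [] (long b, const byte_span &s)
                              { return b < s.start; });
  if (it == spans.begin ())
    return false;
  --it;  /* ...so the only candidate is the span just before it.  */
  if (byte >= it->start && byte < it->start + it->size)
    {
      *out = *it;
      return true;
    }
  return false;
}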
3425
fe97f09a
DM
3426/* Simulate reading the bytes at BYTES from BASE_REG.
3427 Complain to CTXT about any issues with the read, e.g. out-of-bounds. */
3428
3429const svalue *
3430region_model::get_store_bytes (const region *base_reg,
3431 const byte_range &bytes,
3432 region_model_context *ctxt) const
3433{
0ae07a72
DM
3434 /* Shortcut reading all of a string_region. */
3435 if (bytes.get_start_byte_offset () == 0)
3436 if (const string_region *string_reg = base_reg->dyn_cast_string_region ())
3437 if (bytes.m_size_in_bytes
3438 == TREE_STRING_LENGTH (string_reg->get_string_cst ()))
3439 return m_mgr->get_or_create_initial_value (base_reg);
3440
fe97f09a
DM
3441 const svalue *index_sval
3442 = m_mgr->get_or_create_int_cst (size_type_node,
3443 bytes.get_start_byte_offset ());
3444 const region *offset_reg = m_mgr->get_offset_region (base_reg,
3445 NULL_TREE,
3446 index_sval);
3447 const svalue *byte_size_sval
3448 = m_mgr->get_or_create_int_cst (size_type_node, bytes.m_size_in_bytes);
3449 const region *read_reg = m_mgr->get_sized_region (offset_reg,
3450 NULL_TREE,
3451 byte_size_sval);
3452
3453 /* Simulate reading those bytes from the store. */
3454 const svalue *sval = get_store_value (read_reg, ctxt);
3455 return sval;
3456}
3457
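/* Build a tree for the byte at BYTE_OFFSET within PTR_EXPR's target:
   a MEM_REF of char type, used below when reporting on a specific byte
   of a string read.  */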
3458static tree
3459get_tree_for_byte_offset (tree ptr_expr, byte_offset_t byte_offset)
3460{
3461 gcc_assert (ptr_expr);
3462 return fold_build2 (MEM_REF,
3463 char_type_node,
3464 ptr_expr, wide_int_to_tree (size_type_node, byte_offset));
3465}
3466
3467/* Simulate a series of reads of REG until we find a 0 byte
3468 (equivalent to calling strlen).
3469
3470 Complain to CTXT and return NULL if:
3471 - the buffer pointed to isn't null-terminated
3472 - the buffer pointed to has any uninitialized bytes before any 0-terminator
3473 - any of the reads aren't within the bounds of the underlying base region
3474
3475 Otherwise, return a svalue for the number of bytes read (strlen + 1),
3476 and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
3477 representing the content of REG up to and including the terminator.
3478
3479 Algorithm
3480 =========
3481
3482 Get offset for first byte to read.
3483 Find the binding (if any) that contains it.
3484 Find the size in bits of that binding.
3485 Round to the nearest byte (which way???)
3486 Or maybe give up if we have a partial binding there.
3487 Get the svalue from the binding.
3488 Determine the strlen (if any) of that svalue.
3489 Does it have a 0-terminator within it?
3490 If so, we have a partial read up to and including that terminator
3491 Read those bytes from the store; add to the result in the correct place.
3492 Finish
3493 If not, we have a full read of that svalue
3494 Read those bytes from the store; add to the result in the correct place.
3495 Update read/write offsets
3496 Continue
3497 If unknown:
3498 Result is unknown
3499 Finish
3500*/
3501
3502const svalue *
3503region_model::scan_for_null_terminator (const region *reg,
3504 tree expr,
3505 const svalue **out_sval,
3506 region_model_context *ctxt) const
3507{
3508 store_manager *store_mgr = m_mgr->get_store_manager ();
3509
3510 region_offset offset = reg->get_offset (m_mgr);
3511 if (offset.symbolic_p ())
3512 {
3513 if (out_sval)
0ae07a72 3514 *out_sval = get_store_value (reg, nullptr);
fe97f09a
DM
3515 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3516 }
3517 byte_offset_t src_byte_offset;
3518 if (!offset.get_concrete_byte_offset (&src_byte_offset))
3519 {
3520 if (out_sval)
0ae07a72 3521 *out_sval = get_store_value (reg, nullptr);
fe97f09a
DM
3522 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3523 }
3524 const byte_offset_t initial_src_byte_offset = src_byte_offset;
3525 byte_offset_t dst_byte_offset = 0;
3526
3527 const region *base_reg = reg->get_base_region ();
3528
3529 if (const string_region *str_reg = base_reg->dyn_cast_string_region ())
3530 {
3531 tree string_cst = str_reg->get_string_cst ();
3532 if (const void *p = memchr (TREE_STRING_POINTER (string_cst),
3533 0,
3534 TREE_STRING_LENGTH (string_cst)))
3535 {
3536 size_t num_bytes_read
3537 = (const char *)p - TREE_STRING_POINTER (string_cst) + 1;
3538 /* Simulate the read. */
3539 byte_range bytes_to_read (0, num_bytes_read);
3540 const svalue *sval = get_store_bytes (reg, bytes_to_read, ctxt);
3541 if (out_sval)
3542 *out_sval = sval;
3543 return m_mgr->get_or_create_int_cst (size_type_node,
3544 num_bytes_read);
3545 }
3546 }
3547
3548 const binding_cluster *cluster = m_store.get_cluster (base_reg);
3549 iterable_cluster c (cluster);
3550 binding_map result;
3551
3552 while (1)
3553 {
3554 fragment f;
3555 if (c.get_fragment_for_byte (src_byte_offset, &f))
3556 {
3557 byte_offset_t fragment_bytes_read;
3558 tristate is_terminated
3559 = f.has_null_terminator (src_byte_offset, &fragment_bytes_read);
3560 if (is_terminated.is_unknown ())
3561 {
3562 if (out_sval)
0ae07a72 3563 *out_sval = get_store_value (reg, nullptr);
fe97f09a
DM
3564 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3565 }
3566
3567 /* Simulate reading those bytes from the store. */
3568 byte_range bytes_to_read (src_byte_offset, fragment_bytes_read);
3569 const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
3570 check_for_poison (sval, expr, nullptr, ctxt);
3571
3572 if (out_sval)
3573 {
3574 byte_range bytes_to_write (dst_byte_offset, fragment_bytes_read);
3575 const binding_key *key
3576 = store_mgr->get_concrete_binding (bytes_to_write);
3577 result.put (key, sval);
3578 }
3579
3580 src_byte_offset += fragment_bytes_read;
3581 dst_byte_offset += fragment_bytes_read;
3582
3583 if (is_terminated.is_true ())
3584 {
3585 if (out_sval)
3586 *out_sval = m_mgr->get_or_create_compound_svalue (NULL_TREE,
3587 result);
3588 return m_mgr->get_or_create_int_cst (size_type_node,
3589 dst_byte_offset);
3590 }
3591 }
3592 else
3593 break;
3594 }
3595
3596 /* No binding for this base_region, or no binding at src_byte_offset
3597 (or a symbolic binding). */
3598
5ef89c5c
DM
3599 if (c.has_symbolic_bindings_p ())
3600 {
3601 if (out_sval)
0ae07a72 3602 *out_sval = get_store_value (reg, nullptr);
5ef89c5c
DM
3603 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3604 }
3605
fe97f09a
DM
3606 /* TODO: the various special-cases seen in
3607 region_model::get_store_value. */
3608
3609 /* Simulate reading from this byte, then give up. */
3610 byte_range bytes_to_read (src_byte_offset, 1);
3611 const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
3612 tree byte_expr
3613 = get_tree_for_byte_offset (expr,
3614 src_byte_offset - initial_src_byte_offset);
3615 check_for_poison (sval, byte_expr, nullptr, ctxt);
3616 if (base_reg->can_have_initial_svalue_p ())
3617 {
3618 if (out_sval)
0ae07a72 3619 *out_sval = get_store_value (reg, nullptr);
fe97f09a
DM
3620 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3621 }
3622 else
3623 return nullptr;
3624}
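For concreteness, a hypothetical fragment of analyzed code and the scan it induces (illustrative only; the exact bindings depend on how the stores were modeled):

/* When analyzing e.g.:

     char buf[8];
     buf[0] = 'h';
     buf[1] = 'i';
     buf[2] = '\0';
     size_t n = __builtin_strlen (buf);

   the cluster for 'buf' has concrete bindings for bytes 0, 1 and 2; the
   loop above visits them in byte order, finds the terminator within the
   binding for byte 2, and returns the constant 3 (strlen + 1).  */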
3625
325f9e88
DM
3626/* Check that argument ARG_IDX (0-based) to the call described by CD
3627 is a pointer to a valid null-terminated string.
3628
fe97f09a
DM
3629 Simulate scanning through the buffer, reading until we find a 0 byte
3630 (equivalent to calling strlen).
325f9e88 3631
fe97f09a
DM
3632 Complain and return NULL if:
3633 - the buffer pointed to isn't null-terminated
3634 - the buffer pointed to has any uninitialized bytes before any 0-terminator
3635 - any of the reads aren't within the bounds of the underlying base region
325f9e88 3636
fe97f09a
DM
3637 Otherwise, return a svalue for the number of bytes read (strlen + 1),
3638 and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
3639 representing the content of the buffer up to and including the terminator.
325f9e88 3640
fe97f09a
DM
3641 TODO: we should also complain if:
3642 - the pointer is NULL (or could be). */
3643
3644const svalue *
325f9e88 3645region_model::check_for_null_terminated_string_arg (const call_details &cd,
fe97f09a
DM
3646 unsigned arg_idx,
3647 const svalue **out_sval)
325f9e88 3648{
fe97f09a
DM
3649 class null_terminator_check_event : public custom_event
3650 {
3651 public:
3652 null_terminator_check_event (const event_loc_info &loc_info,
3653 const call_arg_details &arg_details)
3654 : custom_event (loc_info),
3655 m_arg_details (arg_details)
3656 {
3657 }
3658
3659 label_text get_desc (bool can_colorize) const final override
3660 {
3661 if (m_arg_details.m_arg_expr)
3662 return make_label_text (can_colorize,
3663 "while looking for null terminator"
3664 " for argument %i (%qE) of %qD...",
3665 m_arg_details.m_arg_idx + 1,
3666 m_arg_details.m_arg_expr,
3667 m_arg_details.m_called_fndecl);
3668 else
3669 return make_label_text (can_colorize,
3670 "while looking for null terminator"
3671 " for argument %i of %qD...",
3672 m_arg_details.m_arg_idx + 1,
3673 m_arg_details.m_called_fndecl);
3674 }
3675
3676 private:
3677 const call_arg_details m_arg_details;
3678 };
3679
3680 class null_terminator_check_decl_note
3681 : public pending_note_subclass<null_terminator_check_decl_note>
3682 {
3683 public:
3684 null_terminator_check_decl_note (const call_arg_details &arg_details)
3685 : m_arg_details (arg_details)
3686 {
3687 }
3688
3689 const char *get_kind () const final override
3690 {
3691 return "null_terminator_check_decl_note";
3692 }
3693
3694 void emit () const final override
3695 {
3696 inform_about_expected_null_terminated_string_arg (m_arg_details);
3697 }
3698
3699 bool operator== (const null_terminator_check_decl_note &other) const
3700 {
3701 return m_arg_details == other.m_arg_details;
3702 }
3703
3704 private:
3705 const call_arg_details m_arg_details;
3706 };
3707
3708 /* Subclass of decorated_region_model_context that
3709 adds the above event and note to any saved diagnostics. */
3710 class annotating_ctxt : public annotating_context
3711 {
3712 public:
3713 annotating_ctxt (const call_details &cd,
3714 unsigned arg_idx)
3715 : annotating_context (cd.get_ctxt ()),
3716 m_cd (cd),
3717 m_arg_idx (arg_idx)
3718 {
3719 }
3720 void add_annotations () final override
3721 {
3722 call_arg_details arg_details (m_cd, m_arg_idx);
3723 event_loc_info loc_info (m_cd.get_location (),
3724 m_cd.get_model ()->get_current_function ()->decl,
3725 m_cd.get_model ()->get_stack_depth ());
3726
3727 add_event (make_unique<null_terminator_check_event> (loc_info,
3728 arg_details));
3729 add_note (make_unique <null_terminator_check_decl_note> (arg_details));
3730 }
3731 private:
3732 const call_details &m_cd;
3733 unsigned m_arg_idx;
3734 };
3735
3736 /* Use this ctxt below so that any diagnostics that get added
3737 get annotated. */
3738 annotating_ctxt my_ctxt (cd, arg_idx);
325f9e88
DM
3739
3740 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
3741 const region *buf_reg
fe97f09a 3742 = deref_rvalue (arg_sval, cd.get_arg_tree (arg_idx), &my_ctxt);
325f9e88 3743
fe97f09a
DM
3744 return scan_for_null_terminator (buf_reg,
3745 cd.get_arg_tree (arg_idx),
3746 out_sval,
3747 &my_ctxt);
325f9e88
DM
3748}
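A sketch of the annotations this adds, for hypothetical analyzed code (the exact warning text comes from whichever diagnostic fires, e.g. an out-of-bounds read):

/* Given:

     char buf[3] = { 'a', 'b', 'c' };  // no terminator
     __builtin_strlen (buf);           // the scan runs off the end

   any diagnostic saved while scanning gains the event
     "while looking for null terminator for argument 1 ('buf')
      of '__builtin_strlen'..."
   and a note describing the expected null-terminated string argument.  */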
3749
808f4dfe 3750/* Remove all bindings overlapping REG within the store. */
884d9141
DM
3751
3752void
808f4dfe
DM
3753region_model::clobber_region (const region *reg)
3754{
3755 m_store.clobber_region (m_mgr->get_store_manager(), reg);
3756}
3757
3758/* Remove any bindings for REG within the store. */
3759
3760void
3761region_model::purge_region (const region *reg)
3762{
3763 m_store.purge_region (m_mgr->get_store_manager(), reg);
3764}
3765
e61ffa20
DM
3766/* Fill REG with SVAL. */
3767
3768void
3769region_model::fill_region (const region *reg, const svalue *sval)
3770{
3771 m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
3772}
3773
808f4dfe
DM
3774/* Zero-fill REG. */
3775
3776void
3777region_model::zero_fill_region (const region *reg)
3778{
3779 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
3780}
3781
0ae07a72
DM
3782/* Write NUM_BYTES_SVAL bytes of SVAL to DEST_REG.
3783 Use CTXT to report any warnings associated with the copy
3784 (e.g. out-of-bounds writes). */
3785
3786void
3787region_model::write_bytes (const region *dest_reg,
3788 const svalue *num_bytes_sval,
3789 const svalue *sval,
3790 region_model_context *ctxt)
3791{
3792 const region *sized_dest_reg
3793 = m_mgr->get_sized_region (dest_reg, NULL_TREE, num_bytes_sval);
3794 set_value (sized_dest_reg, sval, ctxt);
3795}
3796
8556d001
DM
3797/* Read NUM_BYTES_SVAL bytes from SRC_REG.
3798 Use CTXT to report any warnings associated with the copy
3799 (e.g. out-of-bounds reads, copying of uninitialized values, etc). */
3800
3801const svalue *
3802region_model::read_bytes (const region *src_reg,
3803 tree src_ptr_expr,
3804 const svalue *num_bytes_sval,
3805 region_model_context *ctxt) const
3806{
3807 const region *sized_src_reg
3808 = m_mgr->get_sized_region (src_reg, NULL_TREE, num_bytes_sval);
3809 const svalue *src_contents_sval = get_store_value (sized_src_reg, ctxt);
3810 check_for_poison (src_contents_sval, src_ptr_expr,
3811 sized_src_reg, ctxt);
3812 return src_contents_sval;
3813}
3814
3815/* Copy NUM_BYTES_SVAL bytes from SRC_REG to DEST_REG.
3816 Use CTXT to report any warnings associated with the copy
3817 (e.g. out-of-bounds reads/writes, copying of uninitialized values,
3818 etc). */
3819
3820void
3821region_model::copy_bytes (const region *dest_reg,
3822 const region *src_reg,
3823 tree src_ptr_expr,
3824 const svalue *num_bytes_sval,
3825 region_model_context *ctxt)
3826{
3827 const svalue *data_sval
3828 = read_bytes (src_reg, src_ptr_expr, num_bytes_sval, ctxt);
3829 write_bytes (dest_reg, num_bytes_sval, data_sval, ctxt);
3830}
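copy_bytes thus models memcpy-like behavior as a read followed by a write of the same symbolic byte count. A hypothetical known-function handler could drive it as in the sketch below (the handler name and argument layout are assumptions; the region_model and call_details calls match the signatures used in this file):

/* Sketch:

   void
   handle_memcpy_like (region_model *model, const call_details &cd)
   {
     region_model_context *ctxt = cd.get_ctxt ();
     const svalue *num_bytes_sval = cd.get_arg_svalue (2);
     const region *dest_reg
       = model->deref_rvalue (cd.get_arg_svalue (0),
                              cd.get_arg_tree (0), ctxt);
     const region *src_reg
       = model->deref_rvalue (cd.get_arg_svalue (1),
                              cd.get_arg_tree (1), ctxt);
     model->copy_bytes (dest_reg, src_reg, cd.get_arg_tree (1),
                        num_bytes_sval, ctxt);
   }  */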
3831
808f4dfe
DM
3832/* Mark REG as having unknown content. */
3833
3834void
3a66c289
DM
3835region_model::mark_region_as_unknown (const region *reg,
3836 uncertainty_t *uncertainty)
884d9141 3837{
14f5e56a 3838 svalue_set maybe_live_values;
3a66c289 3839 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
14f5e56a
DM
3840 uncertainty, &maybe_live_values);
3841 m_store.on_maybe_live_values (maybe_live_values);
884d9141
DM
3842}
3843
808f4dfe 3844/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
3845 this model. */
3846
3847tristate
808f4dfe
DM
3848region_model::eval_condition (const svalue *lhs,
3849 enum tree_code op,
3850 const svalue *rhs) const
757bf1df 3851{
757bf1df
DM
3852 gcc_assert (lhs);
3853 gcc_assert (rhs);
3854
808f4dfe
DM
3855 /* For now, make no attempt to capture constraints on floating-point
3856 values. */
3857 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
3858 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
3859 return tristate::unknown ();
3860
9bbcee45
DM
3861 /* See what we know based on the values. */
3862
808f4dfe
DM
3863 /* Unwrap any unmergeable values. */
3864 lhs = lhs->unwrap_any_unmergeable ();
3865 rhs = rhs->unwrap_any_unmergeable ();
3866
3867 if (lhs == rhs)
757bf1df 3868 {
808f4dfe
DM
3869 /* If we have the same svalue, then we have equality
3870 (apart from NaN-handling).
3871 TODO: should this definitely be the case for poisoned values? */
3872 /* Poisoned and unknown values are "unknowable". */
3873 if (lhs->get_kind () == SK_POISONED
3874 || lhs->get_kind () == SK_UNKNOWN)
3875 return tristate::TS_UNKNOWN;
e978955d 3876
808f4dfe 3877 switch (op)
757bf1df 3878 {
808f4dfe
DM
3879 case EQ_EXPR:
3880 case GE_EXPR:
3881 case LE_EXPR:
3882 return tristate::TS_TRUE;
07c86323 3883
808f4dfe
DM
3884 case NE_EXPR:
3885 case GT_EXPR:
3886 case LT_EXPR:
3887 return tristate::TS_FALSE;
3888
3889 default:
3890 /* For other ops, use the logic below. */
3891 break;
757bf1df 3892 }
808f4dfe 3893 }
757bf1df 3894
808f4dfe
DM
3895 /* If we have a pair of region_svalues, compare them. */
3896 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3897 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3898 {
3899 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
3900 if (res.is_known ())
3901 return res;
3902 /* Otherwise, only known through constraints. */
3903 }
757bf1df 3904
808f4dfe 3905 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
18faaeb3
DM
3906 {
3907 /* If we have a pair of constants, compare them. */
3908 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3909 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3910 else
3911 {
3912 /* When we have one constant, put it on the RHS. */
3913 std::swap (lhs, rhs);
3914 op = swap_tree_comparison (op);
3915 }
3916 }
3917 gcc_assert (lhs->get_kind () != SK_CONSTANT);
757bf1df 3918
e82e0f14
DM
3919 /* Handle comparison against zero. */
3920 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3921 if (zerop (cst_rhs->get_constant ()))
3922 {
3923 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3924 {
3925 /* A region_svalue is a non-NULL pointer, except in certain
3926 special cases (see the comment for region::non_null_p). */
3927 const region *pointee = ptr->get_pointee ();
3928 if (pointee->non_null_p ())
3929 {
3930 switch (op)
3931 {
3932 default:
3933 gcc_unreachable ();
3934
3935 case EQ_EXPR:
3936 case GE_EXPR:
3937 case LE_EXPR:
3938 return tristate::TS_FALSE;
3939
3940 case NE_EXPR:
3941 case GT_EXPR:
3942 case LT_EXPR:
3943 return tristate::TS_TRUE;
3944 }
3945 }
3946 }
3947 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3948 {
3949 /* Treat offsets from a non-NULL pointer as being non-NULL. This
3950 isn't strictly true, in that eventually ptr++ will wrap
3951 around and be NULL, but it won't occur in practice and thus
3952 can be used to suppress effectively false positives that we
3953 shouldn't warn for. */
3954 if (binop->get_op () == POINTER_PLUS_EXPR)
3955 {
9bbcee45 3956 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
e82e0f14
DM
3957 if (lhs_ts.is_known ())
3958 return lhs_ts;
3959 }
3960 }
0b737090
DM
3961 else if (const unaryop_svalue *unaryop
3962 = lhs->dyn_cast_unaryop_svalue ())
3963 {
3964 if (unaryop->get_op () == NEGATE_EXPR)
3965 {
3966 /* e.g. "-X <= 0" is equivalent to X >= 0". */
3967 tristate lhs_ts = eval_condition (unaryop->get_arg (),
3968 swap_tree_comparison (op),
3969 rhs);
3970 if (lhs_ts.is_known ())
3971 return lhs_ts;
3972 }
3973 }
e82e0f14 3974 }
808f4dfe
DM
3975
3976 /* Handle rejection of equality for comparisons of the initial values of
3977 "external" values (such as params) with the address of locals. */
3978 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3979 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3980 {
3981 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3982 if (res.is_known ())
3983 return res;
3984 }
3985 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3986 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3987 {
3988 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3989 if (res.is_known ())
3990 return res;
3991 }
3992
3993 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3994 if (tree rhs_cst = rhs->maybe_get_constant ())
3995 {
3996 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3997 if (res.is_known ())
3998 return res;
3999 }
4000
7a6564c9 4001 /* Handle comparisons between two svalues with more than one operand. */
9bbcee45 4002 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
7a6564c9
TL
4003 {
4004 switch (op)
4005 {
4006 default:
4007 break;
4008 case EQ_EXPR:
4009 {
4010 /* TODO: binops can be equal even if they are not structurally
4011 equal in case of commutative operators. */
4012 tristate res = structural_equality (lhs, rhs);
4013 if (res.is_true ())
4014 return res;
4015 }
4016 break;
4017 case LE_EXPR:
4018 {
4019 tristate res = structural_equality (lhs, rhs);
4020 if (res.is_true ())
4021 return res;
4022 }
4023 break;
4024 case GE_EXPR:
4025 {
4026 tristate res = structural_equality (lhs, rhs);
4027 if (res.is_true ())
4028 return res;
4029 res = symbolic_greater_than (binop, rhs);
4030 if (res.is_true ())
4031 return res;
4032 }
4033 break;
4034 case GT_EXPR:
4035 {
4036 tristate res = symbolic_greater_than (binop, rhs);
4037 if (res.is_true ())
4038 return res;
4039 }
4040 break;
4041 }
4042 }
4043
9bbcee45
DM
4044 /* Otherwise, try constraints.
4045 Cast to const to ensure we don't change the constraint_manager as we
4046 do this (e.g. by creating equivalence classes). */
4047 const constraint_manager *constraints = m_constraints;
4048 return constraints->eval_condition (lhs, op, rhs);
808f4dfe
DM
4049}
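A worked instance of the zero-comparison handling above, for hypothetical source:

/* Given:

     int x;
     int *p = &x;
     if (p + 1 != NULL)
       ...

   the LHS is a POINTER_PLUS_EXPR binop_svalue; the code above recurses
   on its first argument, and since the region for 'x' satisfies
   non_null_p, the condition evaluates to TS_TRUE without consulting
   the constraint manager.  */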
4050
9bbcee45 4051/* Subroutine of region_model::eval_condition, for rejecting
808f4dfe
DM
4052 equality of INIT_VAL(PARM) with &LOCAL. */
4053
4054tristate
4055region_model::compare_initial_and_pointer (const initial_svalue *init,
4056 const region_svalue *ptr) const
4057{
4058 const region *pointee = ptr->get_pointee ();
4059
4060 /* If we have a pointer to something within a stack frame, it can't be the
4061 initial value of a param. */
4062 if (pointee->maybe_get_frame_region ())
e0139b2a
DM
4063 if (init->initial_value_of_param_p ())
4064 return tristate::TS_FALSE;
757bf1df
DM
4065
4066 return tristate::TS_UNKNOWN;
4067}
4068
7a6564c9
TL
4069/* Return true if SVAL is definitely positive. */
4070
4071static bool
4072is_positive_svalue (const svalue *sval)
4073{
4074 if (tree cst = sval->maybe_get_constant ())
4075 return !zerop (cst) && get_range_pos_neg (cst) == 1;
4076 tree type = sval->get_type ();
4077 if (!type)
4078 return false;
4079 /* Consider a binary operation size_t + int. The analyzer wraps the int in
4080 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
4081 the result can be smaller than the first operand. Thus, we have to check
4082 whether the argument of the unaryop_svalue is also positive. */
4083 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
4084 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
4085 && is_positive_svalue (un_op->get_arg ());
4086 return TYPE_UNSIGNED (type);
4087}
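A hypothetical case motivating the recursion into a conversion's argument:

/* In:

     size_t n = ...;
     int delta = -4;
     size_t total = n + delta;  // delta is converted to size_t and wraps

   the conversion (size_t)delta has an unsigned type, yet the dynamic sum
   is smaller than n; hence a conversion only counts as positive when its
   argument is itself known to be positive.  */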
4088
4089/* Return true if A is definitely larger than B.
4090
4091 Limitation: does not account for integer overflows and does not try to
4092 return false, so it cannot be used negated. */
4093
4094tristate
4095region_model::symbolic_greater_than (const binop_svalue *bin_a,
4096 const svalue *b) const
4097{
4098 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
4099 {
4100 /* Eliminate the right-hand side of both svalues. */
4101 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4102 if (bin_a->get_op () == bin_b->get_op ()
9bbcee45
DM
4103 && eval_condition (bin_a->get_arg1 (),
4104 GT_EXPR,
4105 bin_b->get_arg1 ()).is_true ()
4106 && eval_condition (bin_a->get_arg0 (),
4107 GE_EXPR,
4108 bin_b->get_arg0 ()).is_true ())
7a6564c9
TL
4109 return tristate (tristate::TS_TRUE);
4110
4111 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
4112 if (is_positive_svalue (bin_a->get_arg1 ())
9bbcee45
DM
4113 && eval_condition (bin_a->get_arg0 (),
4114 GE_EXPR, b).is_true ())
7a6564c9
TL
4115 return tristate (tristate::TS_TRUE);
4116 }
4117 return tristate::unknown ();
4118}
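Two hypothetical instances of the eliminations above:

/* With A = n + 12 and B = n + 8 (both PLUS_EXPR), the first branch
   checks 12 > 8 and n >= n, so A > B is TS_TRUE.  With A = n + 12 and
   B = n, the second branch drops the known-positive 12 and checks
   n >= n, again TS_TRUE.  Anything else remains unknown.  */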
4119
4120/* Return true if A and B are equal structurally.
4121
4122 Structural equality means that A and B are equal if the svalues A and B have
4123 the same nodes at the same positions in the tree and the leaves are equal.
4124 Equality for conjured_svalues and initial_svalues is determined by comparing
4125 the pointers, while constants are compared by value. That behavior is useful
4126 for detecting binop_svalues that evaluate to the same concrete value but
4127 use an operand of a different type with the same constant value.
4128
4129 For example,
4130 binop_svalue (mult_expr,
4131 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4132 constant_svalue (‘size_t’, 4))
4133 and
4134 binop_svalue (mult_expr,
4135 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4136 constant_svalue (‘sizetype’, 4))
4137 are structurally equal. A concrete C code example, where this occurs, can
4138 be found in test7 of out-of-bounds-5.c. */
4139
4140tristate
4141region_model::structural_equality (const svalue *a, const svalue *b) const
4142{
4143 /* If A and B are referentially equal, they are also structurally equal. */
4144 if (a == b)
4145 return tristate (tristate::TS_TRUE);
4146
4147 switch (a->get_kind ())
4148 {
4149 default:
4150 return tristate::unknown ();
4151 /* SK_CONJURED and SK_INITIAL are already handled
4152 by the referential equality above. */
4153 case SK_CONSTANT:
4154 {
4155 tree a_cst = a->maybe_get_constant ();
4156 tree b_cst = b->maybe_get_constant ();
4157 if (a_cst && b_cst)
4158 return tristate (tree_int_cst_equal (a_cst, b_cst));
4159 }
4160 return tristate (tristate::TS_FALSE);
4161 case SK_UNARYOP:
4162 {
4163 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
4164 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
4165 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
4166 un_b->get_type ())
4167 && un_a->get_op () == un_b->get_op ()
4168 && structural_equality (un_a->get_arg (),
4169 un_b->get_arg ()));
4170 }
4171 return tristate (tristate::TS_FALSE);
4172 case SK_BINOP:
4173 {
4174 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
4175 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4176 return tristate (bin_a->get_op () == bin_b->get_op ()
4177 && structural_equality (bin_a->get_arg0 (),
4178 bin_b->get_arg0 ())
4179 && structural_equality (bin_a->get_arg1 (),
4180 bin_b->get_arg1 ()));
4181 }
4182 return tristate (tristate::TS_FALSE);
4183 }
4184}
4185
48e8a7a6
DM
4186/* Handle various constraints of the form:
4187 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
4188 OP : == or !=
4189 RHS: zero
4190 and (with a cast):
4191 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
4192 OP : == or !=
4193 RHS: zero
4194 by adding constraints for INNER_LHS INNEROP INNER_RHS.
4195
4196 Return true if this function can fully handle the constraint; if
4197 so, add the implied constraint(s) and write true to *OUT if they
4198 are consistent with existing constraints, or write false to *OUT
4199 if they contradict existing constraints.
4200
4201 Return false for cases that this function doesn't know how to handle.
4202
4203 For example, if we're checking a stored conditional, we'll have
4204 something like:
4205 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
4206 OP : NE_EXPR
4207 RHS: zero
4208 which this function can turn into an add_constraint of:
4209 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
4210
4211 Similarly, optimized && and || conditionals lead to e.g.
4212 if (p && q)
4213 becoming gimple like this:
4214 _1 = p_6 == 0B;
4215 _2 = q_8 == 0B;
4216 _3 = _1 | _2;
4217 On the "_3 is false" branch we can have constraints of the form:
4218 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4219 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
4220 == 0
4221 which implies that both _1 and _2 are false,
4222 which this function can turn into a pair of add_constraints of
4223 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4224 and:
4225 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
4226
4227bool
4228region_model::add_constraints_from_binop (const svalue *outer_lhs,
4229 enum tree_code outer_op,
4230 const svalue *outer_rhs,
4231 bool *out,
4232 region_model_context *ctxt)
4233{
4234 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
4235 outer_lhs = cast;
4236 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
4237 if (!binop_sval)
4238 return false;
4239 if (!outer_rhs->all_zeroes_p ())
4240 return false;
4241
4242 const svalue *inner_lhs = binop_sval->get_arg0 ();
4243 enum tree_code inner_op = binop_sval->get_op ();
4244 const svalue *inner_rhs = binop_sval->get_arg1 ();
4245
4246 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
4247 return false;
4248
4249 /* We have either
4250 - "OUTER_LHS != false" (i.e. OUTER is true), or
4251 - "OUTER_LHS == false" (i.e. OUTER is false). */
4252 bool is_true = outer_op == NE_EXPR;
4253
4254 switch (inner_op)
4255 {
4256 default:
4257 return false;
4258
4259 case EQ_EXPR:
4260 case NE_EXPR:
4261 {
4262 /* ...and "(inner_lhs OP inner_rhs) == 0"
4263 then (inner_lhs OP inner_rhs) must have the same
4264 logical value as LHS. */
4265 if (!is_true)
4266 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
4267 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
4268 return true;
4269 }
4270 break;
4271
4272 case BIT_AND_EXPR:
4273 if (is_true)
4274 {
4275 /* ...and "(inner_lhs & inner_rhs) != 0"
4276 then both inner_lhs and inner_rhs must be true. */
4277 const svalue *false_sval
4278 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4279 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
4280 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
4281 *out = sat1 && sat2;
4282 return true;
4283 }
4284 return false;
4285
4286 case BIT_IOR_EXPR:
4287 if (!is_true)
4288 {
4289 /* ...and "(inner_lhs | inner_rhs) == 0"
4290 i.e. "(inner_lhs | inner_rhs)" is false
4291 then both inner_lhs and inner_rhs must be false. */
4292 const svalue *false_sval
4293 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4294 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
4295 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
4296 *out = sat1 && sat2;
4297 return true;
4298 }
4299 return false;
4300 }
4301}
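The BIT_AND_EXPR case is the dual of the || example in the comment above; a hypothetical instance:

/* For:

     if (p != NULL && q != NULL)
       ...

   gimple may compute _1 = p != 0B; _2 = q != 0B; _3 = _1 & _2; on the
   "_3 is true" branch the BIT_AND_EXPR case adds both p != 0 and
   q != 0 as separate constraints.  */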
4302
757bf1df
DM
4303/* Attempt to add the constraint "LHS OP RHS" to this region_model.
4304 If it is consistent with existing constraints, add it, and return true.
4305 Return false if it contradicts existing constraints.
4306 Use CTXT for reporting any diagnostics associated with the accesses. */
4307
4308bool
4309region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4310 region_model_context *ctxt)
4311{
e978955d
DM
4312 /* For now, make no attempt to capture constraints on floating-point
4313 values. */
4314 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4315 return true;
4316
808f4dfe
DM
4317 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
4318 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 4319
48e8a7a6
DM
4320 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
4321}
4322
4323/* Attempt to add the constraint "LHS OP RHS" to this region_model.
4324 If it is consistent with existing constraints, add it, and return true.
4325 Return false if it contradicts existing constraints.
4326 Use CTXT for reporting any diagnostics associated with the accesses. */
4327
4328bool
4329region_model::add_constraint (const svalue *lhs,
4330 enum tree_code op,
4331 const svalue *rhs,
4332 region_model_context *ctxt)
4333{
4334 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
4335
4336 /* If we already have the condition, do nothing. */
4337 if (t_cond.is_true ())
4338 return true;
4339
4340 /* Reject a constraint that would contradict existing knowledge, as
4341 unsatisfiable. */
4342 if (t_cond.is_false ())
4343 return false;
4344
48e8a7a6
DM
4345 bool out;
4346 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
4347 return out;
757bf1df 4348
c4b8f373
DM
4349 /* Attempt to store the constraint. */
4350 if (!m_constraints->add_constraint (lhs, op, rhs))
4351 return false;
757bf1df
DM
4352
4353 /* Notify the context, if any. This exists so that the state machines
4354 in a program_state can be notified about the condition, and so can
4355 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
4356 when synthesizing constraints as above. */
4357 if (ctxt)
4358 ctxt->on_condition (lhs, op, rhs);
4359
9a2c9579
DM
4360 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
4361 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
4362 if (tree rhs_cst = rhs->maybe_get_constant ())
4363 if (op == EQ_EXPR && zerop (rhs_cst))
4364 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
4365 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 4366
757bf1df
DM
4367 return true;
4368}
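A hypothetical malloc-failure path exercising the extent-dropping at the end:

/* For:

     void *p = malloc (n);
     if (p == NULL)
       ...

   taking the NULL branch adds the constraint that the pointer to the
   heap-allocated region equals zero, and the region's dynamic extents
   are then dropped, since nothing was allocated on this path.  */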
4369
84fb3546
DM
4370/* As above, but when returning false, if OUT is non-NULL, write a
4371 new rejected_constraint to *OUT. */
4372
4373bool
4374region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4375 region_model_context *ctxt,
4376 rejected_constraint **out)
4377{
4378 bool sat = add_constraint (lhs, op, rhs, ctxt);
4379 if (!sat && out)
8ca7fa84 4380 *out = new rejected_op_constraint (*this, lhs, op, rhs);
84fb3546
DM
4381 return sat;
4382}
4383
757bf1df
DM
4384/* Determine what is known about the condition "LHS OP RHS" within
4385 this model.
4386 Use CTXT for reporting any diagnostics associated with the accesses. */
4387
4388tristate
4389region_model::eval_condition (tree lhs,
4390 enum tree_code op,
4391 tree rhs,
5c6546ca 4392 region_model_context *ctxt) const
757bf1df 4393{
e978955d
DM
4394 /* For now, make no attempt to model constraints on floating-point
4395 values. */
4396 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4397 return tristate::unknown ();
4398
757bf1df
DM
4399 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
4400}
4401
467a4820
DM
4402/* Implementation of region_model::get_representative_path_var.
4403 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
4404 Use VISITED to prevent infinite mutual recursion with the overload for
4405 regions. */
757bf1df 4406
808f4dfe 4407path_var
467a4820
DM
4408region_model::get_representative_path_var_1 (const svalue *sval,
4409 svalue_set *visited) const
757bf1df 4410{
467a4820 4411 gcc_assert (sval);
757bf1df 4412
808f4dfe
DM
4413 /* Prevent infinite recursion. */
4414 if (visited->contains (sval))
0e466e97
DM
4415 {
4416 if (sval->get_kind () == SK_CONSTANT)
4417 return path_var (sval->maybe_get_constant (), 0);
4418 else
4419 return path_var (NULL_TREE, 0);
4420 }
808f4dfe 4421 visited->add (sval);
757bf1df 4422
467a4820
DM
4423 /* Handle casts by recursion into get_representative_path_var. */
4424 if (const svalue *cast_sval = sval->maybe_undo_cast ())
4425 {
4426 path_var result = get_representative_path_var (cast_sval, visited);
4427 tree orig_type = sval->get_type ();
4428 /* If necessary, wrap the result in a cast. */
4429 if (result.m_tree && orig_type)
4430 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
4431 return result;
4432 }
4433
808f4dfe
DM
4434 auto_vec<path_var> pvs;
4435 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 4436
808f4dfe
DM
4437 if (tree cst = sval->maybe_get_constant ())
4438 pvs.safe_push (path_var (cst, 0));
757bf1df 4439
90f7c300 4440 /* Handle string literals and various other pointers. */
808f4dfe
DM
4441 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4442 {
4443 const region *reg = ptr_sval->get_pointee ();
4444 if (path_var pv = get_representative_path_var (reg, visited))
4445 return path_var (build1 (ADDR_EXPR,
467a4820 4446 sval->get_type (),
808f4dfe
DM
4447 pv.m_tree),
4448 pv.m_stack_depth);
4449 }
4450
4451 /* If we have a sub_svalue, look for ways to represent the parent. */
4452 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 4453 {
808f4dfe
DM
4454 const svalue *parent_sval = sub_sval->get_parent ();
4455 const region *subreg = sub_sval->get_subregion ();
4456 if (path_var parent_pv
4457 = get_representative_path_var (parent_sval, visited))
4458 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
4459 return path_var (build3 (COMPONENT_REF,
4460 sval->get_type (),
4461 parent_pv.m_tree,
4462 field_reg->get_field (),
4463 NULL_TREE),
4464 parent_pv.m_stack_depth);
90f7c300
DM
4465 }
4466
b9365b93
DM
4467 /* Handle binops. */
4468 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
4469 if (path_var lhs_pv
4470 = get_representative_path_var (binop_sval->get_arg0 (), visited))
4471 if (path_var rhs_pv
4472 = get_representative_path_var (binop_sval->get_arg1 (), visited))
4473 return path_var (build2 (binop_sval->get_op (),
4474 sval->get_type (),
4475 lhs_pv.m_tree, rhs_pv.m_tree),
4476 lhs_pv.m_stack_depth);
4477
808f4dfe
DM
4478 if (pvs.length () < 1)
4479 return path_var (NULL_TREE, 0);
4480
4481 pvs.qsort (readability_comparator);
4482 return pvs[0];
757bf1df
DM
4483}
4484
467a4820
DM
4485/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4486 Use VISITED to prevent infinite mutual recursion with the overload for
4487 regions
4488
4489 This function defers to get_representative_path_var_1 to do the work;
4490 it adds verification that get_representative_path_var_1 returned a tree
4491 of the correct type. */
4492
4493path_var
4494region_model::get_representative_path_var (const svalue *sval,
4495 svalue_set *visited) const
4496{
4497 if (sval == NULL)
4498 return path_var (NULL_TREE, 0);
4499
4500 tree orig_type = sval->get_type ();
4501
4502 path_var result = get_representative_path_var_1 (sval, visited);
4503
4504 /* Verify that the result has the same type as SVAL, if any. */
4505 if (result.m_tree && orig_type)
4506 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
4507
4508 return result;
4509}
4510
4511/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
4512
4513 Strip off any top-level cast, to avoid messages like
4514 double-free of '(void *)ptr'
4515 from analyzer diagnostics. */
757bf1df 4516
808f4dfe
DM
4517tree
4518region_model::get_representative_tree (const svalue *sval) const
757bf1df 4519{
808f4dfe 4520 svalue_set visited;
467a4820
DM
4521 tree expr = get_representative_path_var (sval, &visited).m_tree;
4522
4523 /* Strip off any top-level cast. */
7e3b45be
TL
4524 if (expr && TREE_CODE (expr) == NOP_EXPR)
4525 expr = TREE_OPERAND (expr, 0);
4526
4527 return fixup_tree_for_diagnostic (expr);
4528}
4529
4530tree
4531region_model::get_representative_tree (const region *reg) const
4532{
4533 svalue_set visited;
4534 tree expr = get_representative_path_var (reg, &visited).m_tree;
4535
4536 /* Strip off any top-level cast. */
467a4820 4537 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 4538 expr = TREE_OPERAND (expr, 0);
467a4820 4539
e4bb1bd6 4540 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
4541}
4542
467a4820
DM
4543/* Implementation of region_model::get_representative_path_var.
4544
4545 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
4546 the NULL path_var.
4547 For example, a region for a field of a local would be a path_var
4548 wrapping a COMPONENT_REF.
4549 Use VISITED to prevent infinite mutual recursion with the overload for
4550 svalues. */
757bf1df 4551
808f4dfe 4552path_var
467a4820
DM
4553region_model::get_representative_path_var_1 (const region *reg,
4554 svalue_set *visited) const
808f4dfe
DM
4555{
4556 switch (reg->get_kind ())
757bf1df 4557 {
808f4dfe
DM
4558 default:
4559 gcc_unreachable ();
e516294a 4560
808f4dfe
DM
4561 case RK_FRAME:
4562 case RK_GLOBALS:
4563 case RK_CODE:
4564 case RK_HEAP:
4565 case RK_STACK:
358dab90 4566 case RK_THREAD_LOCAL:
808f4dfe
DM
4567 case RK_ROOT:
4568 /* Regions that represent memory spaces are not expressible as trees. */
4569 return path_var (NULL_TREE, 0);
757bf1df 4570
808f4dfe 4571 case RK_FUNCTION:
884d9141 4572 {
808f4dfe
DM
4573 const function_region *function_reg
4574 = as_a <const function_region *> (reg);
4575 return path_var (function_reg->get_fndecl (), 0);
884d9141 4576 }
808f4dfe 4577 case RK_LABEL:
9e78634c
DM
4578 {
4579 const label_region *label_reg = as_a <const label_region *> (reg);
4580 return path_var (label_reg->get_label (), 0);
4581 }
90f7c300 4582
808f4dfe
DM
4583 case RK_SYMBOLIC:
4584 {
4585 const symbolic_region *symbolic_reg
4586 = as_a <const symbolic_region *> (reg);
4587 const svalue *pointer = symbolic_reg->get_pointer ();
4588 path_var pointer_pv = get_representative_path_var (pointer, visited);
4589 if (!pointer_pv)
4590 return path_var (NULL_TREE, 0);
4591 tree offset = build_int_cst (pointer->get_type (), 0);
4592 return path_var (build2 (MEM_REF,
4593 reg->get_type (),
4594 pointer_pv.m_tree,
4595 offset),
4596 pointer_pv.m_stack_depth);
4597 }
4598 case RK_DECL:
4599 {
4600 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4601 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4602 }
4603 case RK_FIELD:
4604 {
4605 const field_region *field_reg = as_a <const field_region *> (reg);
4606 path_var parent_pv
4607 = get_representative_path_var (reg->get_parent_region (), visited);
4608 if (!parent_pv)
4609 return path_var (NULL_TREE, 0);
4610 return path_var (build3 (COMPONENT_REF,
4611 reg->get_type (),
4612 parent_pv.m_tree,
4613 field_reg->get_field (),
4614 NULL_TREE),
4615 parent_pv.m_stack_depth);
4616 }
757bf1df 4617
808f4dfe
DM
4618 case RK_ELEMENT:
4619 {
4620 const element_region *element_reg
4621 = as_a <const element_region *> (reg);
4622 path_var parent_pv
4623 = get_representative_path_var (reg->get_parent_region (), visited);
4624 if (!parent_pv)
4625 return path_var (NULL_TREE, 0);
4626 path_var index_pv
4627 = get_representative_path_var (element_reg->get_index (), visited);
4628 if (!index_pv)
4629 return path_var (NULL_TREE, 0);
4630 return path_var (build4 (ARRAY_REF,
4631 reg->get_type (),
4632 parent_pv.m_tree, index_pv.m_tree,
4633 NULL_TREE, NULL_TREE),
4634 parent_pv.m_stack_depth);
4635 }
757bf1df 4636
808f4dfe 4637 case RK_OFFSET:
757bf1df 4638 {
808f4dfe
DM
4639 const offset_region *offset_reg
4640 = as_a <const offset_region *> (reg);
4641 path_var parent_pv
4642 = get_representative_path_var (reg->get_parent_region (), visited);
4643 if (!parent_pv)
4644 return path_var (NULL_TREE, 0);
4645 path_var offset_pv
4646 = get_representative_path_var (offset_reg->get_byte_offset (),
4647 visited);
29f5db8e 4648 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 4649 return path_var (NULL_TREE, 0);
29f5db8e
DM
4650 tree addr_parent = build1 (ADDR_EXPR,
4651 build_pointer_type (reg->get_type ()),
4652 parent_pv.m_tree);
808f4dfe
DM
4653 return path_var (build2 (MEM_REF,
4654 reg->get_type (),
29f5db8e 4655 addr_parent, offset_pv.m_tree),
808f4dfe 4656 parent_pv.m_stack_depth);
757bf1df 4657 }
757bf1df 4658
e61ffa20
DM
4659 case RK_SIZED:
4660 return path_var (NULL_TREE, 0);
4661
808f4dfe
DM
4662 case RK_CAST:
4663 {
4664 path_var parent_pv
4665 = get_representative_path_var (reg->get_parent_region (), visited);
4666 if (!parent_pv)
4667 return path_var (NULL_TREE, 0);
4668 return path_var (build1 (NOP_EXPR,
4669 reg->get_type (),
4670 parent_pv.m_tree),
4671 parent_pv.m_stack_depth);
4672 }
757bf1df 4673
808f4dfe
DM
4674 case RK_HEAP_ALLOCATED:
4675 case RK_ALLOCA:
4676 /* No good way to express heap-allocated/alloca regions as trees. */
4677 return path_var (NULL_TREE, 0);
757bf1df 4678
808f4dfe
DM
4679 case RK_STRING:
4680 {
4681 const string_region *string_reg = as_a <const string_region *> (reg);
4682 return path_var (string_reg->get_string_cst (), 0);
4683 }
757bf1df 4684
2402dc6b 4685 case RK_VAR_ARG:
358dab90 4686 case RK_ERRNO:
808f4dfe
DM
4687 case RK_UNKNOWN:
4688 return path_var (NULL_TREE, 0);
4689 }
757bf1df
DM
4690}
4691
467a4820
DM
4692/* Attempt to return a path_var that represents REG, or return
4693 the NULL path_var.
4694 For example, a region for a field of a local would be a path_var
4695 wrapping a COMPONENT_REF.
4696 Use VISITED to prevent infinite mutual recursion with the overload for
4697 svalues.
4698
4699 This function defers to get_representative_path_var_1 to do the work;
4700 it adds verification that get_representative_path_var_1 returned a tree
4701 of the correct type. */
4702
4703path_var
4704region_model::get_representative_path_var (const region *reg,
4705 svalue_set *visited) const
4706{
4707 path_var result = get_representative_path_var_1 (reg, visited);
4708
4709 /* Verify that the result has the same type as REG, if any. */
4710 if (result.m_tree && reg->get_type ())
4711 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4712
4713 return result;
4714}
4715
757bf1df
DM
4716/* Update this model for any phis in SNODE, assuming we came from
4717 LAST_CFG_SUPEREDGE. */
4718
4719void
4720region_model::update_for_phis (const supernode *snode,
4721 const cfg_superedge *last_cfg_superedge,
4722 region_model_context *ctxt)
4723{
4724 gcc_assert (last_cfg_superedge);
4725
e0a7a675
DM
4726 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4727 are effectively handled simultaneously. */
4728 const region_model old_state (*this);
4729
757bf1df
DM
4730 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4731 !gsi_end_p (gpi); gsi_next (&gpi))
4732 {
4733 gphi *phi = gpi.phi ();
4734
4735 tree src = last_cfg_superedge->get_phi_arg (phi);
4736 tree lhs = gimple_phi_result (phi);
4737
e0a7a675
DM
4738 /* Update next_state based on phi and old_state. */
4739 handle_phi (phi, lhs, src, old_state, ctxt);
757bf1df
DM
4740 }
4741}
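A hypothetical pair of interdependent phis showing why the copy matters:

/* With:

     x_3 = PHI <y_1 (bb2), ...>
     y_4 = PHI <x_2 (bb2), ...>

   evaluating each phi's source against OLD_STATE rather than the
   partially-updated model preserves swap semantics: y_4 receives the
   incoming x_2, not a value the first phi already overwrote.  */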
4742
4743/* Attempt to update this model for taking EDGE (where the last statement
4744 was LAST_STMT), returning true if the edge can be taken, false
4745 otherwise.
84fb3546
DM
4746 When returning false, if OUT is non-NULL, write a new rejected_constraint
4747 to it.
757bf1df
DM
4748
4749 For CFG superedges where LAST_STMT is a conditional or a switch
4750 statement, attempt to add the relevant conditions for EDGE to this
4751 model, returning true if they are feasible, or false if they are
4752 impossible.
4753
4754 For call superedges, push frame information and store arguments
4755 into parameters.
4756
4757 For return superedges, pop frame information and store return
4758 values into any lhs.
4759
4760 Rejection of call/return superedges happens elsewhere, in
4761 program_point::on_edge (i.e. based on program point, rather
4762 than program state). */
4763
4764bool
4765region_model::maybe_update_for_edge (const superedge &edge,
4766 const gimple *last_stmt,
84fb3546
DM
4767 region_model_context *ctxt,
4768 rejected_constraint **out)
757bf1df
DM
4769{
4770 /* Handle frame updates for interprocedural edges. */
4771 switch (edge.m_kind)
4772 {
4773 default:
4774 break;
4775
4776 case SUPEREDGE_CALL:
4777 {
4778 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4779 update_for_call_superedge (*call_edge, ctxt);
4780 }
4781 break;
4782
4783 case SUPEREDGE_RETURN:
4784 {
4785 const return_superedge *return_edge
4786 = as_a <const return_superedge *> (&edge);
4787 update_for_return_superedge (*return_edge, ctxt);
4788 }
4789 break;
4790
4791 case SUPEREDGE_INTRAPROCEDURAL_CALL:
bfca9505
DM
4792 /* This is a no-op for call summaries; we should already
4793 have handled the effect of the call summary at the call stmt. */
757bf1df
DM
4794 break;
4795 }
4796
4797 if (last_stmt == NULL)
4798 return true;
4799
4800 /* Apply any constraints for conditionals/switch statements. */
4801
4802 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4803 {
4804 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 4805 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
4806 }
4807
4808 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4809 {
4810 const switch_cfg_superedge *switch_sedge
4811 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
4812 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4813 ctxt, out);
757bf1df
DM
4814 }
4815
1690a839
DM
4816 /* Apply any constraints due to an exception being thrown. */
4817 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4818 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 4819 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 4820
757bf1df
DM
4821 return true;
4822}
4823
4824/* Push a new frame_region on to the stack region.
4825 Populate the frame_region with child regions for the function call's
4826 parameters, using values from the arguments at the callsite in the
4827 caller's frame. */
4828
4829void
aef703cf 4830region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
4831 region_model_context *ctxt,
4832 function *callee)
757bf1df 4833{
808f4dfe 4834 /* Build a vec of argument svalues, using the current top
757bf1df 4835 frame for resolving tree expressions. */
808f4dfe 4836 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
4837
4838 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4839 {
4840 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 4841 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
4842 }
4843
e92d0ff6
AS
4844 if (!callee)
4845 {
4846 /* Get the function * from the gcall. */
4847 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
4848 callee = DECL_STRUCT_FUNCTION (fn_decl);
4849 }
4850
4851 push_frame (callee, &arg_svals, ctxt);
757bf1df
DM
4852}
4853
a96f1c38
DM
4854/* Pop the top-most frame_region from the stack, and copy the return
4855 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 4856 the call (if any). */
aef703cf 4857
757bf1df 4858void
aef703cf
AS
4859region_model::update_for_return_gcall (const gcall *call_stmt,
4860 region_model_context *ctxt)
757bf1df 4861{
4cebae09
DM
4862 /* Get the lvalue for the result of the call, passing it to pop_frame,
4863 so that pop_frame can determine the region with respect to the
4864 *caller* frame. */
757bf1df 4865 tree lhs = gimple_call_lhs (call_stmt);
4cebae09 4866 pop_frame (lhs, NULL, ctxt);
757bf1df
DM
4867}
4868
aef703cf
AS
4869/* Extract calling information from the superedge and update the model for the
4870 call. */
4871
4872void
4873region_model::update_for_call_superedge (const call_superedge &call_edge,
4874 region_model_context *ctxt)
4875{
4876 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 4877 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
4878}
4879
4880/* Extract calling information from the return superedge and update the model
4881 for the returning call. */
4882
4883void
4884region_model::update_for_return_superedge (const return_superedge &return_edge,
4885 region_model_context *ctxt)
4886{
4887 const gcall *call_stmt = return_edge.get_call_stmt ();
4888 update_for_return_gcall (call_stmt, ctxt);
4889}
4890
bfca9505
DM
4891/* Attempt to use R to replay SUMMARY into this object.
4892 Return true if it is possible. */
757bf1df 4893
bfca9505
DM
4894bool
4895region_model::replay_call_summary (call_summary_replay &r,
4896 const region_model &summary)
757bf1df 4897{
bfca9505
DM
4898 gcc_assert (summary.get_stack_depth () == 1);
4899
4900 m_store.replay_call_summary (r, summary.m_store);
757bf1df 4901
bfca9505
DM
4902 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4903 return false;
4904
4905 for (auto kv : summary.m_dynamic_extents)
4906 {
4907 const region *summary_reg = kv.first;
4908 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4909 if (!caller_reg)
4910 continue;
4911 const svalue *summary_sval = kv.second;
4912 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4913 if (!caller_sval)
4914 continue;
4915 m_dynamic_extents.put (caller_reg, caller_sval);
4916 }
4917
4918 return true;
757bf1df
DM
4919}
4920
4921/* Given a true or false edge guarded by conditional statement COND_STMT,
4922 determine appropriate constraints for the edge to be taken.
4923
4924 If they are feasible, add the constraints and return true.
4925
4926 Return false if the constraints contradict existing knowledge
84fb3546
DM
4927 (and so the edge should not be taken).
4928 When returning false, if OUT is non-NULL, write a new rejected_constraint
4929 to it. */
757bf1df
DM
4930
4931bool
4932region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4933 const gcond *cond_stmt,
84fb3546
DM
4934 region_model_context *ctxt,
4935 rejected_constraint **out)
757bf1df
DM
4936{
4937 ::edge cfg_edge = sedge.get_cfg_edge ();
4938 gcc_assert (cfg_edge != NULL);
4939 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4940
4941 enum tree_code op = gimple_cond_code (cond_stmt);
4942 tree lhs = gimple_cond_lhs (cond_stmt);
4943 tree rhs = gimple_cond_rhs (cond_stmt);
4944 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4945 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 4946 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
4947}
4948
ccd4df81
DM
4949/* Return true iff SWITCH_STMT has a non-default label that contains
4950 INT_CST. */
4951
4952static bool
4953has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
4954{
4955 /* We expect the initial label to be the default; skip it. */
4956 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
4957 unsigned min_idx = 1;
4958 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
4959
4960 /* Binary search: try to find the label containing INT_CST.
4961 This requires the cases to be sorted by CASE_LOW (done by the
4962 gimplifier). */
4963 while (max_idx >= min_idx)
4964 {
4965 unsigned case_idx = (min_idx + max_idx) / 2;
4966 tree label = gimple_switch_label (switch_stmt, case_idx);
4967 tree low = CASE_LOW (label);
4968 gcc_assert (low);
4969 tree high = CASE_HIGH (label);
4970 if (!high)
4971 high = low;
4972 if (tree_int_cst_compare (int_cst, low) < 0)
4973 {
4974 /* INT_CST is below the range of this label. */
4975 gcc_assert (case_idx > 0);
4976 max_idx = case_idx - 1;
4977 }
4978 else if (tree_int_cst_compare (int_cst, high) > 0)
4979 {
4980 /* INT_CST is above the range of this case. */
4981 min_idx = case_idx + 1;
4982 }
4983 else
4984 /* This case contains INT_CST. */
4985 return true;
4986 }
4987 /* Not found. */
4988 return false;
4989}
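A worked probe of the binary search, with hypothetical labels:

/* For labels (after the default) "case 1:", "case 3 ... 5:", "case 9:",
   probing INT_CST == 4 picks the middle label [3,5], which contains 4,
   so the function returns true; probing 7 steps right past [3,5], then
   left past [9,9], exhausting the range and returning false.  */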
4990
4991/* Return true iff SWITCH_STMT (which must be on an enum value)
4992 has nondefault cases handling all values in the enum. */
4993
4994static bool
4995has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4996{
4997 gcc_assert (switch_stmt);
4998 tree type = TREE_TYPE (gimple_switch_index (switch_stmt));
4999 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
5000
5001 for (tree enum_val_iter = TYPE_VALUES (type);
5002 enum_val_iter;
5003 enum_val_iter = TREE_CHAIN (enum_val_iter))
5004 {
5005 tree enum_val = TREE_VALUE (enum_val_iter);
5006 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
5007 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
5008 if (!has_nondefault_case_for_value_p (switch_stmt,
5009 DECL_INITIAL (enum_val)))
5010 return false;
5011 }
5012 return true;
5013}
5014
/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
                                             const gswitch *switch_stmt,
                                             region_model_context *ctxt,
                                             rejected_constraint **out)
{
  tree index = gimple_switch_index (switch_stmt);
  const svalue *index_sval = get_rvalue (index, ctxt);

  /* If we're switching based on an enum type, assume that the user is only
     working with values from the enum.  Hence if this is an
     implicitly-created "default", assume it doesn't get followed.
     This fixes numerous "uninitialized" false positives where we otherwise
     consider jumping past the initialization cases.  */

  if (/* Don't check during feasibility-checking (when ctxt is NULL).  */
      ctxt
      /* Must be an enum value.  */
      && index_sval->get_type ()
      && TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE
      && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
      /* If we have a constant, then we can check it directly.  */
      && index_sval->get_kind () != SK_CONSTANT
      && edge.implicitly_created_default_p ()
      && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
      /* Don't do this if there's a chance that the index is
         attacker-controlled.  */
      && !ctxt->possibly_tainted_p (index_sval))
    {
      if (out)
        *out = new rejected_default_case (*this);
      return false;
    }

  bounded_ranges_manager *ranges_mgr = get_range_manager ();
  const bounded_ranges *all_cases_ranges
    = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
  bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
  if (!sat && out)
    *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
  if (sat && ctxt && !all_cases_ranges->empty_p ())
    ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
  return sat;
}

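/* Illustrative sketch (hypothetical user code, not from the testsuite):
   given

     enum color { RED, GREEN, BLUE };

     int get_value (enum color c)
     {
       int result;
       switch (c)
         {
         case RED:   result = 1; break;
         case GREEN: result = 2; break;
         case BLUE:  result = 3; break;
         }
       return result;
     }

   the gimplifier synthesizes an implicit "default:" label.  Since every
   enum value has its own non-default case and "c" isn't tainted, the
   logic above treats the implicit default as unreachable, avoiding a
   false positive about "result" being used uninitialized.  */
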
/* Apply any constraints due to an exception being thrown at LAST_STMT.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_exception (const gimple *last_stmt,
                                               region_model_context *ctxt,
                                               rejected_constraint **out)
{
  gcc_assert (last_stmt);
  if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
    if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
      if (is_named_call_p (callee_fndecl, "operator new", call, 1)
          || is_named_call_p (callee_fndecl, "operator new []", call, 1))
        {
          /* We have an exception thrown from operator new.
             Add a constraint that the result was NULL, to avoid a false
             leak report due to the result being lost when following
             the EH edge.  */
          if (tree lhs = gimple_call_lhs (call))
            return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
          return true;
        }
  return true;
}

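/* Illustrative sketch (hypothetical user code, not from the testsuite):
   for

     void *p = ::operator new (1024);  // throws std::bad_alloc on failure

   when following the exception edge, the "p == NULL" constraint added
   above prevents a false leak report about the allocation being lost on
   the EH path.  */
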
/* For use with push_frame when handling a top-level call within the analysis.
   PARAM has a defined but unknown initial value.
   Anything it points to has escaped, since the calling context "knows"
   the pointer, and thus calls to unknown functions could read/write into
   the region.
   If NONNULL is true, then assume that PARAM must be non-NULL.  */

void
region_model::on_top_level_param (tree param,
                                  bool nonnull,
                                  region_model_context *ctxt)
{
  if (POINTER_TYPE_P (TREE_TYPE (param)))
    {
      const region *param_reg = get_lvalue (param, ctxt);
      const svalue *init_ptr_sval
        = m_mgr->get_or_create_initial_value (param_reg);
      const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
      m_store.mark_as_escaped (pointee_reg);
      if (nonnull)
        {
          const svalue *null_ptr_sval
            = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
          add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
        }
    }
}

/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
                          region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm), ++idx)
        {
          /* If there's a mismatching declaration, the call stmt might
             not have enough args.  Handle this case by leaving the
             rest of the params as uninitialized.  */
          if (idx >= arg_svals->length ())
            break;
          tree parm_lval = iter_parm;
          if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
            parm_lval = parm_default_ssa;
          const region *parm_reg = get_lvalue (parm_lval, ctxt);
          const svalue *arg_sval = (*arg_svals)[idx];
          set_value (parm_reg, arg_sval, ctxt);
        }

      /* Handle any variadic args.  */
      unsigned va_arg_idx = 0;
      for (; idx < arg_svals->length (); idx++, va_arg_idx++)
        {
          const svalue *arg_sval = (*arg_svals)[idx];
          const region *var_arg_reg
            = m_mgr->get_var_arg_region (m_current_frame,
                                         va_arg_idx);
          set_value (var_arg_reg, arg_sval, ctxt);
        }
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
         have defined but unknown initial values.
         Anything they point to has escaped.  */
      tree fndecl = fun->decl;

      /* Handle "__attribute__((nonnull))".  */
      tree fntype = TREE_TYPE (fndecl);
      bitmap nonnull_args = get_nonnull_args (fntype);

      unsigned parm_idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm))
        {
          bool non_null = (nonnull_args
                           ? (bitmap_empty_p (nonnull_args)
                              || bitmap_bit_p (nonnull_args, parm_idx))
                           : false);
          if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
            on_top_level_param (parm_default_ssa, non_null, ctxt);
          else
            on_top_level_param (iter_parm, non_null, ctxt);
          parm_idx++;
        }

      BITMAP_FREE (nonnull_args);
    }

  return m_current_frame;
}

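/* Illustrative sketch (hypothetical user code, not from the testsuite):
   when the analysis starts at

     __attribute__((nonnull))
     void test (int *p, int *q);

   push_frame is called with ARG_SVALS == NULL: both "p" and "q" get
   defined-but-unknown initial values, whatever they point to is marked
   as escaped, and (since the attribute without arguments covers every
   pointer parameter) both are constrained to be non-NULL.  */
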
/* Get the function of the top-most frame in this region_model's stack.
   There must be such a frame.  */

function *
region_model::get_current_function () const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  return frame->get_function ();
}

/* Pop the topmost frame_region from this region_model's stack.

   If RESULT_LVALUE is non-null, copy any return value from the frame
   into the corresponding region (evaluated with respect to the *caller*
   frame, rather than the called frame).
   If OUT_RESULT is non-null, copy any return value from the frame
   into *OUT_RESULT.

   If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
   This is for use when unwinding frames e.g. due to longjmp, to suppress
   erroneously reporting uninitialized return values.

   Purge the frame region and all its descendent regions.
   Convert any pointers that point into such regions into
   POISON_KIND_POPPED_STACK svalues.  */

void
region_model::pop_frame (tree result_lvalue,
                         const svalue **out_result,
                         region_model_context *ctxt,
                         bool eval_return_svalue)
{
  gcc_assert (m_current_frame);

  const frame_region *frame_reg = m_current_frame;

  /* Notify state machines.  */
  if (ctxt)
    ctxt->on_pop_frame (frame_reg);

  /* Evaluate the result, within the callee frame.  */
  tree fndecl = m_current_frame->get_function ()->decl;
  tree result = DECL_RESULT (fndecl);
  const svalue *retval = NULL;
  if (result
      && TREE_TYPE (result) != void_type_node
      && eval_return_svalue)
    {
      retval = get_rvalue (result, ctxt);
      if (out_result)
        *out_result = retval;
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  if (result_lvalue && retval)
    {
      gcc_assert (eval_return_svalue);

      /* Compute result_dst_reg using RESULT_LVALUE *after* popping
         the frame, but before poisoning pointers into the old frame.  */
      const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
      set_value (result_dst_reg, retval, ctxt);
    }

  unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
}

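/* Illustrative sketch (hypothetical user code, not from the testsuite):
   when returning from "callee" in

     int caller (void)
     {
       int r = callee ();
       return r;
     }

   RESULT_LVALUE is "r": the return value is evaluated within the callee
   frame, the frame is popped, and only then is the value written to "r"
   in the caller frame, before pointers into the dead frame are
   poisoned.  */
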
/* Get the number of frames in this region_model's stack.  */

int
region_model::get_stack_depth () const
{
  const frame_region *frame = get_current_frame ();
  if (frame)
    return frame->get_stack_depth ();
  else
    return 0;
}

/* Get the frame_region with the given index within the stack.
   The frame_region must exist.  */

const frame_region *
region_model::get_frame_at_index (int index) const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  gcc_assert (index >= 0);
  gcc_assert (index <= frame->get_index ());
  while (index != frame->get_index ())
    {
      frame = frame->get_calling_frame ();
      gcc_assert (frame);
    }
  return frame;
}

/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
                                             enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
        base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
        unset_dynamic_extents (iter_reg);
    }
}

/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.  */

struct bad_pointer_finder
{
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
                      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
        const region *ptr_dst = ptr_sval->get_pointee ();
        /* Poison ptrs to descendents of REG, but not to REG itself,
           otherwise double-free detection doesn't work (since sm-state
           for "free" is stored on the original ptr svalue).  */
        if (ptr_dst->descendent_of_p (m_reg)
            && ptr_dst != m_reg)
          {
            sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
                                                         sval->get_type ());
            ++m_count;
          }
      }
  }

  const region *m_reg;
  enum poison_kind m_pkind;
  region_model_manager *const m_mgr;
  int m_count;
};

/* Find any pointers to REG or its descendents; convert them to
   poisoned values of kind PKIND.
   Return the number of pointers that were poisoned.  */

int
region_model::poison_any_pointers_to_descendents (const region *reg,
                                                  enum poison_kind pkind)
{
  bad_pointer_finder bv (reg, pkind, m_mgr);
  m_store.for_each_binding (bv);
  return bv.m_count;
}

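/* Illustrative sketch (hypothetical user code, not from the testsuite):
   in

     int *p;

     void test (void)
     {
       int local;
       p = &local;
     }

   once test's frame is popped, "p" points into the dead frame, so its
   value is replaced with a POISON_KIND_POPPED_STACK svalue, allowing
   later dereferences to be reported as use-after-return.  */
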
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.
   Return true if the merge was possible; false otherwise.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
                                const program_point &point,
                                region_model *out_model,
                                const extrinsic_state *ext_state,
                                const program_state *state_a,
                                const program_state *state_b) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
                  ext_state, state_a, state_b);

  if (!store::can_merge_p (&m_store, &other_model.m_store,
                           &out_model->m_store, m_mgr->get_store_manager (),
                           &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
                                           &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
                             *other_model.m_constraints,
                             out_model->m_constraints);

  return true;
}

/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
                                   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
        = fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
        {
          tree fn_decl = fn_reg->get_fndecl ();
          cgraph_node *node = cgraph_node::get (fn_decl);
          if (!node)
            return NULL_TREE;
          const cgraph_node *ultimate_node = node->ultimate_alias_target ();
          if (ultimate_node)
            return ultimate_node->decl;
        }
    }

  return NULL_TREE;
}

/* Would be much simpler to use a lambda here, if it were supported.  */

struct append_regions_cb_data
{
  const region_model *model;
  auto_vec<const decl_region *> *out;
};

/* Populate *OUT with all decl_regions in the current
   frame that have clusters within the store.  */

void
region_model::
get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
{
  append_regions_cb_data data;
  data.model = this;
  data.out = out;
  m_store.for_each_cluster (append_regions_cb, &data);
}

/* Implementation detail of get_regions_for_current_frame.  */

void
region_model::append_regions_cb (const region *base_reg,
                                 append_regions_cb_data *cb_data)
{
  if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
    return;
  if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
    cb_data->out->safe_push (decl_reg);
}

/* Abstract class for diagnostics related to the use of
   floating-point arithmetic where precision is needed.  */

class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};

/* Concrete diagnostic to complain about uses of floating-point arithmetic
   in the size argument of malloc etc.  */

class float_as_size_arg : public imprecise_floating_point_arithmetic
{
public:
  float_as_size_arg (tree arg) : m_arg (arg)
  {}

  const char *get_kind () const final override
  {
    return "float_as_size_arg_diagnostic";
  }

  bool subclass_equal_p (const pending_diagnostic &other) const final override
  {
    return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    diagnostic_metadata m;
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
                                "use of floating-point arithmetic here might"
                                " yield unexpected results");
    if (warned)
      inform (rich_loc->get_loc (), "only use operands of an integer type"
              " inside the size argument");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
    override
  {
    if (m_arg)
      return ev.formatted_print ("operand %qE is of type %qT",
                                 m_arg, TREE_TYPE (m_arg));
    return ev.formatted_print ("at least one operand of the size argument is"
                               " of a floating-point type");
  }

private:
  tree m_arg;
};

/* Visitor to find uses of floating-point variables/constants in an svalue.  */

class contains_floating_point_visitor : public visitor
{
public:
  contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
  {
    root_sval->accept (this);
  }

  const svalue *get_svalue_to_report ()
  {
    return m_result;
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    /* At the point the analyzer runs, constant integer operands in a floating
       point expression are already implicitly converted to floating-points.
       Thus, we do prefer to report non-constants such that the diagnostic
       always reports a floating-point operand.  */
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type) && !m_result)
      m_result = sval;
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

private:
  /* Non-null if at least one floating-point operand was found.  */
  const svalue *m_result;
};

/* May complain about uses of floating-point operands in SIZE_IN_BYTES.  */

void
region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
                                             region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  contains_floating_point_visitor v (size_in_bytes);
  if (const svalue *float_sval = v.get_svalue_to_report ())
    {
      tree diag_arg = get_representative_tree (float_sval);
      ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
    }
}

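/* Illustrative sketch (hypothetical user code, not from the testsuite):
   a size computed with floating-point arithmetic, such as

     void *p = malloc (n * 4.0);  // "n" converted to double

   is flagged by -Wanalyzer-imprecise-fp-arithmetic; the visitor above
   prefers to report a non-constant operand, so the diagnostic points at
   a floating-point use of "n" rather than at the literal.  */
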
/* Return a region describing a heap-allocated block of memory.
   Use CTXT to complain about tainted sizes.

   Reuse an existing heap_allocated_region if it's not being referenced by
   this region_model; otherwise create a new one.

   If UPDATE_STATE_MACHINE is true (and CD is non-NULL), also transition
   the pointer pointing to the heap_allocated_region from "start" to
   assumed non-null.  */

const region *
region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
                                                   region_model_context *ctxt,
                                                   bool update_state_machine,
                                                   const call_details *cd)
{
  /* Determine which regions are referenced in this region_model, so that
     we can reuse an existing heap_allocated_region if it's not in use on
     this path.  */
  auto_bitmap base_regs_in_use;
  get_referenced_base_regions (base_regs_in_use);

  /* Don't reuse regions that are marked as TOUCHED.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    if ((*iter).second->touched_p ())
      {
        const region *base_reg = (*iter).first;
        bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
      }

  const region *reg
    = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
  if (size_in_bytes)
    if (compat_types_p (size_in_bytes->get_type (), size_type_node))
      set_dynamic_extents (reg, size_in_bytes, ctxt);

  if (update_state_machine && cd)
    {
      const svalue *ptr_sval
        = m_mgr->get_ptr_svalue (cd->get_lhs_type (), reg);
      transition_ptr_sval_non_null (ctxt, ptr_sval);
    }

  return reg;
}

/* Populate OUT_IDS with the set of IDs of those base regions which are
   reachable in this region_model.  */

void
region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
{
  reachable_regions reachable_regs (const_cast<region_model *> (this));
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
                            &reachable_regs);
  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
        if (parent->get_kind () == RK_FRAME)
          reachable_regs.add (base_reg, false);
    }

  bitmap_clear (out_ids);
  for (auto iter_reg : reachable_regs)
    bitmap_set_bit (out_ids, iter_reg->get_id ());
}

/* Return a new region describing a block of memory allocated within the
   current frame.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_alloca (const svalue *size_in_bytes,
                                        region_model_context *ctxt)
{
  const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}

/* Record that the size of REG is SIZE_IN_BYTES.
   Use CTXT to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
                                   const svalue *size_in_bytes,
                                   region_model_context *ctxt)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  if (ctxt)
    {
      check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
                                    ctxt);
      check_dynamic_size_for_floats (size_in_bytes, ctxt);
    }
  m_dynamic_extents.put (reg, size_in_bytes);
}

/* Get the recorded size of REG in bytes, or NULL if no dynamic size was
   recorded.  */

const svalue *
region_model::get_dynamic_extents (const region *reg) const
{
  if (const svalue * const *slot = m_dynamic_extents.get (reg))
    return *slot;
  return NULL;
}

/* Unset any recorded dynamic size of REG.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  m_dynamic_extents.remove (reg);
}

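/* Illustrative sketch (hypothetical user code, not from the testsuite):
   in

     void test (size_t n)
     {
       char buf[n];           // VLA
       char *p = alloca (n);  // alloca region
     }

   both regions get "n" recorded as their dynamic extent; the extents
   are purged again when the frame is popped (see
   unbind_region_and_descendents above).  */
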
/* Information about the layout of a RECORD_TYPE, capturing it as a vector
   of items, where each item is either a field or padding.  */

class record_layout
{
public:
  /* An item within a record; either a field, or padding after a field.  */
  struct item
  {
  public:
    item (const bit_range &br,
          tree field,
          bool is_padding)
    : m_bit_range (br),
      m_field (field),
      m_is_padding (is_padding)
    {
    }

    bit_offset_t get_start_bit_offset () const
    {
      return m_bit_range.get_start_bit_offset ();
    }
    bit_offset_t get_next_bit_offset () const
    {
      return m_bit_range.get_next_bit_offset ();
    }

    bool contains_p (bit_offset_t offset) const
    {
      return m_bit_range.contains_p (offset);
    }

    void dump_to_pp (pretty_printer *pp) const
    {
      if (m_is_padding)
        pp_printf (pp, "padding after %qD", m_field);
      else
        pp_printf (pp, "%qD", m_field);
      pp_string (pp, ", ");
      m_bit_range.dump_to_pp (pp);
    }

    bit_range m_bit_range;
    tree m_field;
    bool m_is_padding;
  };

  record_layout (tree record_type)
  {
    gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);

    for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
         iter = DECL_CHAIN (iter))
      {
        if (TREE_CODE (iter) == FIELD_DECL)
          {
            int iter_field_offset = int_bit_position (iter);
            bit_size_t size_in_bits;
            if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
              size_in_bits = 0;

            maybe_pad_to (iter_field_offset);

            /* Add field.  */
            m_items.safe_push (item (bit_range (iter_field_offset,
                                                size_in_bits),
                                     iter, false));
          }
      }

    /* Add any trailing padding.  */
    bit_size_t size_in_bits;
    if (int_size_in_bits (record_type, &size_in_bits))
      maybe_pad_to (size_in_bits);
  }

  void dump_to_pp (pretty_printer *pp) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      {
        it->dump_to_pp (pp);
        pp_newline (pp);
      }
  }

  DEBUG_FUNCTION void dump () const
  {
    pretty_printer pp;
    pp_format_decoder (&pp) = default_tree_printer;
    pp.buffer->stream = stderr;
    dump_to_pp (&pp);
    pp_flush (&pp);
  }

  const record_layout::item *get_item_at (bit_offset_t offset) const
  {
    unsigned i;
    item *it;
    FOR_EACH_VEC_ELT (m_items, i, it)
      if (it->contains_p (offset))
        return it;
    return NULL;
  }

private:
  /* Subroutine of ctor.  Add padding item to NEXT_OFFSET if necessary.  */

  void maybe_pad_to (bit_offset_t next_offset)
  {
    if (m_items.length () > 0)
      {
        const item &last_item = m_items[m_items.length () - 1];
        bit_offset_t offset_after_last_item
          = last_item.get_next_bit_offset ();
        if (next_offset > offset_after_last_item)
          {
            bit_size_t padding_size
              = next_offset - offset_after_last_item;
            m_items.safe_push (item (bit_range (offset_after_last_item,
                                                padding_size),
                                     last_item.m_field, true));
          }
      }
  }

  auto_vec<item> m_items;
};

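/* Illustrative sketch (hypothetical type, assuming a typical target
   where "int" is 4 bytes with 4-byte alignment): for

     struct st { char c; int i; };

   the layout above would be captured as

     "c",               bits 0-7
     padding after "c", bits 8-31
     "i",               bits 32-63

   which is what get_item_at uses to describe which field (or padding)
   an uninitialized bit range falls within.  */
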
/* A subclass of pending_diagnostic for complaining about uninitialized data
   being copied across a trust boundary to an untrusted output
   (e.g. copy_to_user infoleaks in the Linux kernel).  */

class exposure_through_uninit_copy
  : public pending_diagnostic_subclass<exposure_through_uninit_copy>
{
public:
  exposure_through_uninit_copy (const region *src_region,
                                const region *dest_region,
                                const svalue *copied_sval)
  : m_src_region (src_region),
    m_dest_region (dest_region),
    m_copied_sval (copied_sval)
  {
    gcc_assert (m_copied_sval->get_kind () == SK_POISONED
                || m_copied_sval->get_kind () == SK_COMPOUND);
  }

  const char *get_kind () const final override
  {
    return "exposure_through_uninit_copy";
  }

  bool operator== (const exposure_through_uninit_copy &other) const
  {
    return (m_src_region == other.m_src_region
            && m_dest_region == other.m_dest_region
            && m_copied_sval == other.m_copied_sval);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_exposure_through_uninit_copy;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    diagnostic_metadata m;
    /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor.  */
    m.add_cwe (200);
    enum memory_space mem_space = get_src_memory_space ();
    bool warned;
    switch (mem_space)
      {
      default:
        warned = warning_meta
          (rich_loc, m, get_controlling_option (),
           "potential exposure of sensitive information"
           " by copying uninitialized data across trust boundary");
        break;
      case MEMSPACE_STACK:
        warned = warning_meta
          (rich_loc, m, get_controlling_option (),
           "potential exposure of sensitive information"
           " by copying uninitialized data from stack across trust boundary");
        break;
      case MEMSPACE_HEAP:
        warned = warning_meta
          (rich_loc, m, get_controlling_option (),
           "potential exposure of sensitive information"
           " by copying uninitialized data from heap across trust boundary");
        break;
      }
    if (warned)
      {
        location_t loc = rich_loc->get_loc ();
        inform_number_of_uninit_bits (loc);
        complain_about_uninit_ranges (loc);

        if (mem_space == MEMSPACE_STACK)
          maybe_emit_fixit_hint ();
      }
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &) final override
  {
    enum memory_space mem_space = get_src_memory_space ();
    switch (mem_space)
      {
      default:
        return label_text::borrow ("uninitialized data copied here");

      case MEMSPACE_STACK:
        return label_text::borrow ("uninitialized data copied from stack here");

      case MEMSPACE_HEAP:
        return label_text::borrow ("uninitialized data copied from heap here");
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  enum memory_space get_src_memory_space () const
  {
    return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
  }

  bit_size_t calc_num_uninit_bits () const
  {
    switch (m_copied_sval->get_kind ())
      {
      default:
        gcc_unreachable ();
        break;
      case SK_POISONED:
        {
          const poisoned_svalue *poisoned_sval
            = as_a <const poisoned_svalue *> (m_copied_sval);
          gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);

          /* Give up if we don't have type information.  */
          if (m_copied_sval->get_type () == NULL_TREE)
            return 0;

          bit_size_t size_in_bits;
          if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
            return size_in_bits;

          /* Give up if we can't get the size of the type.  */
          return 0;
        }
        break;
      case SK_COMPOUND:
        {
          const compound_svalue *compound_sval
            = as_a <const compound_svalue *> (m_copied_sval);
          bit_size_t result = 0;
          /* Find keys for uninit svals.  */
          for (auto iter : *compound_sval)
            {
              const svalue *sval = iter.second;
              if (const poisoned_svalue *psval
                  = sval->dyn_cast_poisoned_svalue ())
                if (psval->get_poison_kind () == POISON_KIND_UNINIT)
                  {
                    const binding_key *key = iter.first;
                    const concrete_binding *ckey
                      = key->dyn_cast_concrete_binding ();
                    gcc_assert (ckey);
                    result += ckey->get_size_in_bits ();
                  }
            }
          return result;
        }
      }
  }

  void inform_number_of_uninit_bits (location_t loc) const
  {
    bit_size_t num_uninit_bits = calc_num_uninit_bits ();
    if (num_uninit_bits <= 0)
      return;
    if (num_uninit_bits % BITS_PER_UNIT == 0)
      {
        /* Express in bytes.  */
        byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
        if (num_uninit_bytes == 1)
          inform (loc, "1 byte is uninitialized");
        else
          inform (loc,
                  "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
      }
    else
      {
        /* Express in bits.  */
        if (num_uninit_bits == 1)
          inform (loc, "1 bit is uninitialized");
        else
          inform (loc,
                  "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
      }
  }

  void complain_about_uninit_ranges (location_t loc) const
  {
    if (const compound_svalue *compound_sval
          = m_copied_sval->dyn_cast_compound_svalue ())
      {
        /* Find keys for uninit svals.  */
        auto_vec<const concrete_binding *> uninit_keys;
        for (auto iter : *compound_sval)
          {
            const svalue *sval = iter.second;
            if (const poisoned_svalue *psval
                = sval->dyn_cast_poisoned_svalue ())
              if (psval->get_poison_kind () == POISON_KIND_UNINIT)
                {
                  const binding_key *key = iter.first;
                  const concrete_binding *ckey
                    = key->dyn_cast_concrete_binding ();
                  gcc_assert (ckey);
                  uninit_keys.safe_push (ckey);
                }
          }
        /* Complain about them in sorted order.  */
        uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);

        std::unique_ptr<record_layout> layout;

        tree type = m_copied_sval->get_type ();
        if (type && TREE_CODE (type) == RECORD_TYPE)
          {
            // (std::make_unique is C++14)
            layout = std::unique_ptr<record_layout> (new record_layout (type));

            if (0)
              layout->dump ();
          }

        unsigned i;
        const concrete_binding *ckey;
        FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
          {
            bit_offset_t start_bit = ckey->get_start_bit_offset ();
            bit_offset_t next_bit = ckey->get_next_bit_offset ();
            complain_about_uninit_range (loc, start_bit, next_bit,
                                         layout.get ());
          }
      }
  }

  void complain_about_uninit_range (location_t loc,
                                    bit_offset_t start_bit,
                                    bit_offset_t next_bit,
                                    const record_layout *layout) const
  {
    if (layout)
      {
        while (start_bit < next_bit)
          {
            if (const record_layout::item *item
                  = layout->get_item_at (start_bit))
              {
                gcc_assert (start_bit >= item->get_start_bit_offset ());
                gcc_assert (start_bit < item->get_next_bit_offset ());
                if (item->get_start_bit_offset () == start_bit
                    && item->get_next_bit_offset () <= next_bit)
                  complain_about_fully_uninit_item (*item);
                else
                  complain_about_partially_uninit_item (*item);
                start_bit = item->get_next_bit_offset ();
                continue;
              }
            else
              break;
          }
      }

    if (start_bit >= next_bit)
      return;

    if (start_bit % 8 == 0 && next_bit % 8 == 0)
      {
        /* Express in bytes.  */
        byte_offset_t start_byte = start_bit / 8;
        byte_offset_t last_byte = (next_bit / 8) - 1;
        if (last_byte == start_byte)
          inform (loc,
                  "byte %wu is uninitialized",
                  start_byte.to_uhwi ());
        else
          inform (loc,
                  "bytes %wu - %wu are uninitialized",
                  start_byte.to_uhwi (),
                  last_byte.to_uhwi ());
      }
    else
      {
        /* Express in bits.  */
        bit_offset_t last_bit = next_bit - 1;
        if (last_bit == start_bit)
          inform (loc,
                  "bit %wu is uninitialized",
                  start_bit.to_uhwi ());
        else
          inform (loc,
                  "bits %wu - %wu are uninitialized",
                  start_bit.to_uhwi (),
                  last_bit.to_uhwi ());
      }
  }

  static void
  complain_about_fully_uninit_item (const record_layout::item &item)
  {
    tree field = item.m_field;
    bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
    if (item.m_is_padding)
      {
        if (num_bits % 8 == 0)
          {
            /* Express in bytes.  */
            byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
            if (num_bytes == 1)
              inform (DECL_SOURCE_LOCATION (field),
                      "padding after field %qD is uninitialized (1 byte)",
                      field);
            else
              inform (DECL_SOURCE_LOCATION (field),
                      "padding after field %qD is uninitialized (%wu bytes)",
                      field, num_bytes.to_uhwi ());
          }
        else
          {
            /* Express in bits.  */
            if (num_bits == 1)
              inform (DECL_SOURCE_LOCATION (field),
                      "padding after field %qD is uninitialized (1 bit)",
                      field);
            else
              inform (DECL_SOURCE_LOCATION (field),
                      "padding after field %qD is uninitialized (%wu bits)",
                      field, num_bits.to_uhwi ());
          }
      }
    else
      {
        if (num_bits % 8 == 0)
          {
            /* Express in bytes.  */
            byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
            if (num_bytes == 1)
              inform (DECL_SOURCE_LOCATION (field),
                      "field %qD is uninitialized (1 byte)", field);
            else
              inform (DECL_SOURCE_LOCATION (field),
                      "field %qD is uninitialized (%wu bytes)",
                      field, num_bytes.to_uhwi ());
          }
        else
          {
            /* Express in bits.  */
            if (num_bits == 1)
              inform (DECL_SOURCE_LOCATION (field),
                      "field %qD is uninitialized (1 bit)", field);
            else
              inform (DECL_SOURCE_LOCATION (field),
                      "field %qD is uninitialized (%wu bits)",
                      field, num_bits.to_uhwi ());
          }
      }
  }

  static void
  complain_about_partially_uninit_item (const record_layout::item &item)
  {
    tree field = item.m_field;
    if (item.m_is_padding)
      inform (DECL_SOURCE_LOCATION (field),
              "padding after field %qD is partially uninitialized",
              field);
    else
      inform (DECL_SOURCE_LOCATION (field),
              "field %qD is partially uninitialized",
              field);
    /* TODO: ideally we'd describe what parts are uninitialized.  */
  }

  void maybe_emit_fixit_hint () const
  {
    if (tree decl = m_src_region->maybe_get_decl ())
      {
        gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
        hint_richloc.add_fixit_insert_after (" = {0}");
        inform (&hint_richloc,
                "suggest forcing zero-initialization by"
                " providing a %<{0}%> initializer");
      }
  }

private:
  const region *m_src_region;
  const region *m_dest_region;
  const svalue *m_copied_sval;
};

/* Return true if any part of SVAL is uninitialized.  */

static bool
contains_uninit_p (const svalue *sval)
{
  struct uninit_finder : public visitor
  {
  public:
    uninit_finder () : m_found_uninit (false) {}
    void visit_poisoned_svalue (const poisoned_svalue *sval)
    {
      if (sval->get_poison_kind () == POISON_KIND_UNINIT)
        m_found_uninit = true;
    }
    bool m_found_uninit;
  };

  uninit_finder v;
  sval->accept (&v);

  return v.m_found_uninit;
}

/* Function for use by plugins when simulating writing data through a
   pointer to an "untrusted" region DST_REG (and thus crossing a security
   boundary), such as copying data to user space in an OS kernel.

   Check that COPIED_SVAL is fully initialized.  If not, complain about
   an infoleak to CTXT.

   SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
   as to where COPIED_SVAL came from.  */

void
region_model::maybe_complain_about_infoleak (const region *dst_reg,
                                             const svalue *copied_sval,
                                             const region *src_reg,
                                             region_model_context *ctxt)
{
  /* Check for exposure.  */
  if (contains_uninit_p (copied_sval))
    ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
                                                           dst_reg,
                                                           copied_sval));
}

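/* Illustrative sketch (hypothetical code, assuming a plugin that calls
   the above for a copy_to_user-like trust boundary, and typical
   struct padding):

     struct st { char c; int i; } s;  // 3 bytes of padding after "c"
     s.c = 'a';
     s.i = 42;
     copy_to_user (uptr, &s, sizeof (s));

   Here COPIED_SVAL is a compound svalue whose padding bits are still
   uninitialized, so an infoleak is reported, with the fix-it hint
   suggesting a "= {0}" initializer.  */
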
/* Set errno to a positive symbolic int, as if some error has occurred.  */

void
region_model::set_errno (const call_details &cd)
{
  const region *errno_reg = m_mgr->get_errno_region ();
  conjured_purge p (this, cd.get_ctxt ());
  const svalue *new_errno_sval
    = m_mgr->get_or_create_conjured_svalue (integer_type_node,
                                            cd.get_call_stmt (),
                                            errno_reg, p);
  const svalue *zero
    = m_mgr->get_or_create_int_cst (integer_type_node, 0);
  add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
  set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
}

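/* Illustrative sketch (hypothetical usage when simulating a failing
   call):

     fd = open (path, O_RDONLY);  // simulated failure path

   on such a path, set_errno gives errno a fresh conjured value
   constrained to be > 0, so that subsequent reads of errno behave
   plausibly.  */
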
/* class noop_region_model_context : public region_model_context.  */

void
noop_region_model_context::add_note (std::unique_ptr<pending_note>)
{
}

void
noop_region_model_context::add_event (std::unique_ptr<checker_event>)
{
}

void
noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
{
}

void
noop_region_model_context::terminate_path ()
{
}

/* class region_model_context_decorator : public region_model_context.  */

void
region_model_context_decorator::add_event (std::unique_ptr<checker_event> event)
{
  if (m_inner)
    m_inner->add_event (std::move (event));
}

/* struct model_merger.  */

/* Dump a multiline representation of this merger to PP.  */

void
model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
{
  pp_string (pp, "model A:");
  pp_newline (pp);
  m_model_a->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "model B:");
  pp_newline (pp);
  m_model_b->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "merged model:");
  pp_newline (pp);
  m_merged_model->dump_to_pp (pp, simple, true);
  pp_newline (pp);
}

/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}

/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}

/* Return true if it's OK to merge SVAL with other svalues.  */

bool
model_merger::mergeable_svalue_p (const svalue *sval) const
{
  if (m_ext_state)
    {
      /* Reject merging svalues that have non-purgable sm-state,
         to avoid falsely reporting memory leaks by merging them
         with something else.  For example, given a local var "p",
         reject the merger of a:
           store_a mapping "p" to a malloc-ed ptr
         with:
           store_b mapping "p" to a NULL ptr.  */
      if (m_state_a)
        if (!m_state_a->can_purge_p (*m_ext_state, sval))
          return false;
      if (m_state_b)
        if (!m_state_b->can_purge_p (*m_ext_state, sval))
          return false;
    }
  return true;
}

} // namespace ana

/* Dump RMODEL fully to stderr (i.e. without summarization).  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}

/* class rejected_op_constraint : public rejected_constraint.  */

void
rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
{
  region_model m (m_model);
  const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
  const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
  lhs_sval->dump_to_pp (pp, true);
  pp_printf (pp, " %s ", op_symbol_code (m_op));
  rhs_sval->dump_to_pp (pp, true);
}

/* class rejected_default_case : public rejected_constraint.  */

void
rejected_default_case::dump_to_pp (pretty_printer *pp) const
{
  pp_string (pp, "implicit default for enum");
}

/* class rejected_ranges_constraint : public rejected_constraint.  */

void
rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
{
  region_model m (m_model);
  const svalue *sval = m.get_rvalue (m_expr, NULL);
  sval->dump_to_pp (pp, true);
  pp_string (pp, " in ");
  m_ranges->dump_to_pp (pp, true);
}

/* class engine.  */

/* engine's ctor.  */

engine::engine (const supergraph *sg, logger *logger)
: m_sg (sg), m_mgr (logger)
{
}

/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  m_mgr.log_stats (logger, true);
}

namespace ana {

#if CHECKING_P

namespace selftest {

/* Build a constant tree of the given type from STR.  */

static tree
build_real_cst_from_string (tree type, const char *str)
{
  REAL_VALUE_TYPE real;
  real_from_string (&real, str);
  return build_real (type, real);
}

/* Append various "interesting" constants to OUT (e.g. NaN).  */

static void
append_interesting_constants (auto_vec<tree> *out)
{
  out->safe_push (build_int_cst (integer_type_node, 0));
  out->safe_push (build_int_cst (integer_type_node, 42));
  out->safe_push (build_int_cst (unsigned_type_node, 0));
  out->safe_push (build_int_cst (unsigned_type_node, 42));
  out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
}

/* Verify that tree_cmp is a well-behaved comparator for qsort, even
   if the underlying constants aren't comparable.  */

static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple.  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
        {
          auto_vec<tree> v (3);
          v.quick_push (csts[i]);
          v.quick_push (csts[j]);
          v.quick_push (csts[k]);
          v.qsort (tree_cmp);
        }
}

/* Implementation detail of the ASSERT_CONDITION_* macros.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  const svalue *lhs, tree_code op, const svalue *rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}

/* Implementation detail of the ASSERT_CONDITION_* macros.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  tree lhs, tree_code op, tree rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}

/* Implementation detail of ASSERT_DUMP_TREE_EQ.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT

/* Implementation detail of ASSERT_DUMP_EQ.  */

static void
assert_dump_eq (const location &loc,
                const region_model &model,
                bool summarize,
                const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT

/* Smoketest for region_model::dump_to_pp.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  ASSERT_DUMP_EQ (model, false,
                  "stack depth: 0\n"
                  "m_called_unknown_fn: FALSE\n"
                  "constraint_manager:\n"
                  "  equiv classes:\n"
                  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
                  "stack depth: 0\n"
                  "m_called_unknown_fn: FALSE\n"
                  "constraint_manager:\n"
                  "  equiv classes:\n"
                  "  constraints:\n");
}

/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
                         const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  layout_type (t);
  return t;
}

/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  tree m_x_field;
  tree m_y_field;
  tree m_coord_type;
};

/* Verify usage of a struct.  */

static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y".  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}

6702
808f4dfe 6703/* Verify usage of an array element. */
884d9141
DM
6704
6705static void
808f4dfe 6706test_array_1 ()
884d9141
DM
6707{
6708 tree tlen = size_int (10);
6709 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6710
6711 tree a = build_global_decl ("a", arr_type);
6712
808f4dfe
DM
6713 region_model_manager mgr;
6714 region_model model (&mgr);
884d9141
DM
6715 tree int_0 = build_int_cst (integer_type_node, 0);
6716 tree a_0 = build4 (ARRAY_REF, char_type_node,
6717 a, int_0, NULL_TREE, NULL_TREE);
6718 tree char_A = build_int_cst (char_type_node, 'A');
6719 model.set_value (a_0, char_A, NULL);
884d9141
DM
6720}
6721
90f7c300
DM
6722/* Verify that region_model::get_representative_tree works as expected. */
6723
6724static void
6725test_get_representative_tree ()
6726{
808f4dfe
DM
6727 region_model_manager mgr;
6728
90f7c300
DM
6729 /* STRING_CST. */
6730 {
6731 tree string_cst = build_string (4, "foo");
808f4dfe
DM
6732 region_model m (&mgr);
6733 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6734 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
6735 ASSERT_EQ (rep, string_cst);
6736 }
6737
6738 /* String literal. */
6739 {
6740 tree string_cst_ptr = build_string_literal (4, "foo");
808f4dfe
DM
6741 region_model m (&mgr);
6742 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6743 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
6744 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6745 }
808f4dfe
DM
6746
6747 /* Value of an element within an array. */
6748 {
6749 tree tlen = size_int (10);
6750 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6751 tree a = build_global_decl ("a", arr_type);
9d804f9b
DM
6752 placeholder_svalue test_sval (mgr.alloc_symbol_id (),
6753 char_type_node, "test value");
808f4dfe
DM
6754
6755 /* Value of a[3]. */
6756 {
6757 test_region_model_context ctxt;
6758 region_model model (&mgr);
6759 tree int_3 = build_int_cst (integer_type_node, 3);
6760 tree a_3 = build4 (ARRAY_REF, char_type_node,
6761 a, int_3, NULL_TREE, NULL_TREE);
6762 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6763 model.set_value (a_3_reg, &test_sval, &ctxt);
6764 tree rep = model.get_representative_tree (&test_sval);
6765 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6766 }
6767
6768 /* Value of a[0]. */
6769 {
6770 test_region_model_context ctxt;
6771 region_model model (&mgr);
6772 tree idx = build_int_cst (integer_type_node, 0);
6773 tree a_0 = build4 (ARRAY_REF, char_type_node,
6774 a, idx, NULL_TREE, NULL_TREE);
6775 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6776 model.set_value (a_0_reg, &test_sval, &ctxt);
6777 tree rep = model.get_representative_tree (&test_sval);
6778 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6779 }
6780 }
6781
6782 /* Value of a field within a struct. */
6783 {
6784 coord_test ct;
6785
6786 tree c = build_global_decl ("c", ct.m_coord_type);
6787 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6788 c, ct.m_x_field, NULL_TREE);
6789 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6790 c, ct.m_y_field, NULL_TREE);
6791
6792 test_region_model_context ctxt;
6793
6794 /* Value of initial field. */
6795 {
6796 region_model m (&mgr);
6797 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
9d804f9b
DM
6798 placeholder_svalue test_sval_x (mgr.alloc_symbol_id (),
6799 integer_type_node, "test x val");
808f4dfe
DM
6800 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6801 tree rep = m.get_representative_tree (&test_sval_x);
6802 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6803 }
6804
6805 /* Value of non-initial field. */
6806 {
6807 region_model m (&mgr);
6808 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
9d804f9b
DM
6809 placeholder_svalue test_sval_y (mgr.alloc_symbol_id (),
6810 integer_type_node, "test y val");
808f4dfe
DM
6811 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6812 tree rep = m.get_representative_tree (&test_sval_y);
6813 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6814 }
6815 }
90f7c300
DM
6816}

/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
             model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}

/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}

/* Verify that initial_svalues are handled as expected.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
}

/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
                                      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
                                      x_init, y_init));
}

/* Verify that binops on constant svalues are folded.  */
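/* For example, the binop (3 + 4) should fold to the constant svalue 7,
   and binops with an identity element should fold symbolically, so that
   (x + 0) and (x * 1) both yield x's svalue; the checks below exercise
   both kinds of folding.  */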

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
        if (i != j)
          ASSERT_NE (cst_sval[i], cst_sval[j]);
        if (i + j < NUM_CSTS)
          {
            const svalue *sum
              = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (sum, cst_sval[i + j]);
          }
        if (i - j >= 0)
          {
            const svalue *difference
              = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (difference, cst_sval[i - j]);
          }
        if (i * j < NUM_CSTS)
          {
            const svalue *product
              = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (product, cst_sval[i * j]);
          }
        const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
                                                    cst_sval[i], cst_sval[j]);
        ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
        const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
                                                     cst_sval[i], cst_sval[j]);
        ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
        // etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding.  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (mgr.alloc_symbol_id (),
                                               boolean_type_node, "v");
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_true);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, &sval_placeholder),
                   &sval_placeholder);
      }
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_false);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, &sval_placeholder),
                   &sval_placeholder);
      }
  }
}

/* Verify that sub_svalues are folded as expected.  */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply
     yields an unknown.  */

  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
                                                    unknown, c_x_reg);
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}

/* Get BIT within VAL as a symbolic value within MGR.  */
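/* For example, get_bit (&mgr, 3, 0x8) extracts bit 3 of the constant 8,
   which should fold to the boolean constant 1.  */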

static const svalue *
get_bit (region_model_manager *mgr,
         bit_offset_t bit,
         unsigned HOST_WIDE_INT val)
{
  const svalue *inner_svalue
    = mgr->get_or_create_int_cst (unsigned_type_node, val);
  return mgr->get_or_create_bits_within (boolean_type_node,
                                         bit_range (bit, 1),
                                         inner_svalue);
}

/* Verify that bits_within_svalues are folded as expected.  */

static void
test_bits_within_svalue_folding ()
{
  region_model_manager mgr;

  const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
  const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);

  {
    const unsigned val = 0x0000;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  {
    const unsigned val = 0x0001;
    ASSERT_EQ (get_bit (&mgr, 0, val), one);
    for (unsigned bit = 1; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  {
    const unsigned val = 0x8000;
    for (unsigned bit = 0; bit < 15; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
    ASSERT_EQ (get_bit (&mgr, 15, val), one);
  }

  {
    const unsigned val = 0xFFFF;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), one);
  }
}

/* Test that region::descendent_of_p works as expected.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}

/* Verify that bit_range_region works as expected.  */

static void
test_bit_range_regions ()
{
  tree x = build_global_decl ("x", integer_type_node);
  region_model_manager mgr;
  const region *x_reg = mgr.get_region_for_global (x);
  const region *byte0
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
  const region *byte1
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
  ASSERT_TRUE (byte0->descendent_of_p (x_reg));
  ASSERT_TRUE (byte1->descendent_of_p (x_reg));
  ASSERT_NE (byte0, byte1);
}

/* Verify that simple assignments work as expected.  */

static void
test_assignment ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* "x == 0", then use of y, then "y = 0;".  */
  region_model_manager mgr;
  region_model model (&mgr);
  ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
  ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
  model.set_value (model.get_lvalue (y, NULL),
                   model.get_rvalue (int_0, NULL),
                   NULL);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
}

/* Verify that compound assignments work as expected.  */
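/* The C code being simulated is roughly (illustrative only):
     struct coord c;  c.x = 17;  c.y = -3;
     struct coord d;
     d = c;
   after which d's fields should share c's svalues.  */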

static void
test_compound_assignment ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);
  tree d = build_global_decl ("d", ct.m_coord_type);
  tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     d, ct.m_x_field, NULL_TREE);
  tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     d, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Copy c to d.  */
  const svalue *sval = model.get_rvalue (c, NULL);
  model.set_value (model.get_lvalue (d, NULL), sval, NULL);

  /* Check that the fields have the same svalues.  */
  ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
  ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
}

/* Verify the details of pushing and popping stack frames.  */
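/* The scenario being simulated is roughly (illustrative only):
     int parent_fn (int a, int b);   with a == 42 and b < 10
     int child_fn (int x, int y);    with x == 0 and y != 5
   where child_fn is called from parent_fn, the globals "p" and "q" are
   set up so that p == &x and q == &p, and then child_fn's frame is
   popped, which should poison p.  */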

static void
test_stack_frames ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = build_int_cst (integer_type_node, 0);

  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
                                    "parent_fn",
                                    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
                                   "child_fn",
                                   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame.  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("a"),
                       integer_type_node);
  DECL_CONTEXT (a) = parent_fndecl;
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("b"),
                       integer_type_node);
  DECL_CONTEXT (b) = parent_fndecl;
  /* "x" and "y" in a child frame.  */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("x"),
                       integer_type_node);
  DECL_CONTEXT (x) = child_fndecl;
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("y"),
                       integer_type_node);
  DECL_CONTEXT (y) = child_fndecl;

  /* "p" global.  */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global.  */
  tree q = build_global_decl ("q", ptr_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame for "parent_fn".  */
  const region *parent_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
                        NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
  const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_reg,
                   model.get_rvalue (int_42, &ctxt),
                   &ctxt);
  ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);

  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
             tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn".  */
  const region *child_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
                   model.get_rvalue (int_0, &ctxt),
                   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
             tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame:  p = &x.  */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
                   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
                   &ctxt);
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);

  /* Point another global pointer at p:  q = &p.  */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
                   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
                   &ctxt);

  /* Test region::descendent_of_p.  */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned.  */
  const svalue *new_p_sval = model.get_rvalue (p, NULL);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
             POISON_KIND_POPPED_STACK);

  /* Verify that q still points to p, in spite of the region
     renumbering.  */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->maybe_get_region (),
             model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated.  */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame.  */
  /* Verify "a" still has its value.  */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
             int_42);
  /* Verify "b" still has its constraint.  */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
             tristate (tristate::TS_TRUE));
}

/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack.  */
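/* The scenario is a recursive function (illustrative only):
     int factorial (int n) { ... factorial (n - 1) ... }
   with five frames pushed, so that each depth has a distinct region
   and initial svalue for the parameter "n".  */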

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
                             "factorial",
                             param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("n"),
                       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
        = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
        svalue_set visited;
        ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
                                                      &visited),
                   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
         not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
                 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
        svalue_set visited;
        ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
                                                      &visited),
                   path_var (n, depth + 1));
      }
    }
}

/* Ensure that region_model::operator== works as expected.  */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}

/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal.  */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
                    model0.get_rvalue (int_42, NULL),
                    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
                    model0.get_rvalue (int_113, NULL),
                    NULL);

  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
                    model1.get_rvalue (int_113, NULL),
                    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
                    model1.get_rvalue (int_42, NULL),
                    NULL);

  ASSERT_EQ (model0, model1);
}

/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization.  */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}

/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}

/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergeable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.  */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
                            region_model *out_merged_model,
                            const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
                      model0.get_rvalue (val_a, &ctxt),
                      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
                      model1.get_rvalue (val_b, &ctxt),
                      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}

/* Verify that we can merge region_model instances.  */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("a"),
                       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("q"),
                       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
                                  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
                                  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
               int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
                      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
                                &merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
                                &merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
                      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (mgr.alloc_symbol_id (),
                                         integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
                      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
               tristate (tristate::TS_TRUE));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
         - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
                      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances.  */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
             tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
             tristate (tristate::TS_TRUE));

  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
             tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
             tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */
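/* The test below builds a widening svalue from the constants 0 then 1,
   giving an ascending series starting at 0: such a value is known to be
   >= 0 but has no known upper bound, hence the mix of TS_TRUE/TS_FALSE
   and TS_UNKNOWN results asserted below.  */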

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
                                         int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
             widening_svalue::DIR_ASCENDING);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
             tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
     i_11 = PHI <i_15(2), i_23(3)>
     if (i_11 <= 255)
       goto <bb 3>;
     else
       goto [AFTER LOOP]

    <bb 3> :
     [LOOP BODY]
     i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
       [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
       T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
       F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
             tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
             tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
             tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */
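/* The C code being simulated is roughly (illustrative only):
     void *p = malloc (n);
     char *q = (char *)p;
   after which learning "p != NULL" should also establish "q != NULL",
   since both point to the same heap-allocated region.  */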

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}
8214
808f4dfe
DM
8215/* Smoketest of getting and setting the value of a variable. */
static void
test_var ()
{
  /* "int i;" */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value". */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value". */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;". */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
             model.get_rvalue (int_17, NULL));

  /* "i = -3;". */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
             model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i". */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

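/* Test of getting and setting array elements: concrete bindings for
   "arr[0]" and "arr[1]", their region offsets, and a write through a
   symbolic index "arr[i]", which should invalidate the earlier
   concrete bindings (and vice versa). */
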
static void
test_array_2 ()
{
  /* "int arr[10];" */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;" */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
                       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
                       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
                       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;". */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;". */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;". */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]". */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]". */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]". */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings. */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding. */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF. */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
    *p;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;". */
  model.set_value (x, int_17, NULL);

  /* "p = &x;". */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
                         pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works. */
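
/* A rough C-level sketch of what the test models (illustrative only):

       int *p = malloc (n * 4);

   after which the capacity of the pointed-to region should be the
   requested size in bytes. */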

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
                           n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);". */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works. */
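
/* A rough C-level sketch of what the test models (illustrative only):

       void test_fn (void)
       {
         p = alloca (n * 4);
       }

   with the added check that once the frame is popped, pointers into
   the alloca-ed region become poisoned values rather than dangling
   pointers. */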

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
                             "test_fn",
                             param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
                           n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame. */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
                        NULL, &ctxt);
  /* "p = alloca (n * 4);". */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped. */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works. */
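
/* Roughly: involves_p (OTHER) reports whether OTHER is needed to
   compute this svalue.  For example, with "int *p;", the initial
   value of "*p" involves the initial value of "p" (we must read "p"
   to locate "*p"), but not vice versa, as the assertions below spell
   out. */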

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file. */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */