/* Classes for modeling the state of memory.
   Copyright (C) 2019-2023 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"
#include "analyzer/checker-event.h"
#include "analyzer/checker-path.h"
#include "analyzer/feasible-graph.h"

#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}

/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
        return false;
      if (sval != *other_slot)
        return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
                                 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (i > 0)
        pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

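/* An illustrative note (not from upstream, but derivable from the code
   above): with MULTILINE false, dump_to_pp emits output of the shape
   " {reg0: sval0, reg1: sval1}", whereas with MULTILINE true each
   "  reg: sval" pair is printed on a line of its own.  */
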
/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
                                       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
        {
          if (iter_sval == *other_slot)
            out->put (iter_reg, iter_sval);
          else
            return false;
        }
    }
  return true;
}

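/* An illustrative example (not from upstream): merging {r0: A, r1: B}
   with {r0: A} succeeds with OUT = {r0: A}, the intersection of the
   two maps, whereas merging {r0: A, r1: B} with {r0: A, r1: C} is
   rejected, since r1 maps to different values.  */
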
/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
        to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
                          bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
        pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
                      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
                             const region *src_region,
                             tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override
  {
    return "poisoned_value_diagnostic";
  }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
            && m_pkind == other.m_pkind
            && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
        return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
        return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool terminate_path_p () const final override { return true; }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        {
          diagnostic_metadata m;
          m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use of uninitialized value %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_FREED:
        {
          diagnostic_metadata m;
          m.add_cwe (416); /* "CWE-416: Use After Free".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use after %<free%> of %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_POPPED_STACK:
        {
          /* TODO: which CWE?  */
          return warning_at
            (rich_loc, get_controlling_option (),
             "dereferencing pointer %qE to within stale stack frame",
             m_expr);
        }
        break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return ev.formatted_print ("use of uninitialized value %qE here",
                                   m_expr);
      case POISON_KIND_FREED:
        return ev.formatted_print ("use after %<free%> of %qE here",
                                   m_expr);
      case POISON_KIND_POPPED_STACK:
        return ev.formatted_print
          ("dereferencing pointer %qE to within stale stack frame",
           m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode,
                            const gimple *emission_stmt)
    const final override
  {
    if (!m_check_expr)
      return true;

    /* We've reached the enode, but not necessarily the right function_point.
       Try to get the state at the correct stmt.  */
    region_model emission_model (fnode.get_model ().get_manager ());
    if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
      /* Couldn't get state; accept this diagnostic.  */
      return true;

    const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULL);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue *fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
  tree m_check_expr;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
                               m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
                                   int operand_precision,
                                   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && m_operand_precision == other.m_operand_precision
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by count (%qE) >= precision of type (%qi)",
                       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};

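/* An illustrative sketch (not from upstream) of code these two
   diagnostics are intended to fire on, assuming -fanalyzer and a
   32-bit int:

     int f (int i) { return i << -2; }  // -Wanalyzer-shift-count-negative
     int g (int i) { return i << 40; }  // -Wanalyzer-shift-count-overflow
*/
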
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
                                  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
        /* e.g. "_1 = a_10(D) + 12;" */
        tree ptr = rhs1;
        tree offset = gimple_assign_rhs2 (assign);

        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
           is an integer of type sizetype".  */
        offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
        /* e.g. "_1 = p_2(D) - q_3(D);".  */
        tree rhs2 = gimple_assign_rhs2 (assign);
        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        // TODO: perhaps fold to zero if they're known to be equal?

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    /* Assignments of the form
         set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL: /* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        /* Unary ops.  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
        return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (TREE_TYPE (lhs) == boolean_type_node)
          {
            /* Consider constraints between svalues.  */
            tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
            if (t.is_known ())
              return m_mgr->get_or_create_constant_svalue
                (t.is_true () ? boolean_true_node : boolean_false_node);
          }

        /* Otherwise, generate a symbolic binary op.  */
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
        /* Binary ops.  */
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
          {
            /* "INT34-C. Do not shift an expression by a negative number
               of bits or by greater than or equal to the number of bits
               that exist in the operand."  */
            if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
              if (TREE_CODE (rhs2_cst) == INTEGER_CST
                  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
                {
                  if (tree_int_cst_sgn (rhs2_cst) < 0)
                    ctxt->warn
                      (make_unique<shift_count_negative_diagnostic>
                         (assign, rhs2_cst));
                  else if (compare_tree_int (rhs2_cst,
                                             TYPE_PRECISION (TREE_TYPE (rhs1)))
                           >= 0)
                    ctxt->warn
                      (make_unique<shift_count_overflow_diagnostic>
                         (assign,
                          int (TYPE_PRECISION (TREE_TYPE (rhs1))),
                          rhs2_cst));
                }
          }

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in
   gimplify_expr), giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
                               const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
        && gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
        return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
        return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;".

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of an IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
                                tree expr,
                                const region *src_region,
                                region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
         to initialize.  */
      if (pkind == POISON_KIND_UNINIT
          && sval->get_type ()
          && is_empty_type (sval->get_type ()))
        return sval;

      if (pkind == POISON_KIND_UNINIT)
        if (const gimple *curr_stmt = ctxt->get_stmt ())
          if (const gassign *assign_stmt
                = dyn_cast <const gassign *> (curr_stmt))
            {
              /* Special case to avoid certain false positives.  */
              if (within_short_circuited_stmt_p (this, assign_stmt))
                return sval;

              /* Special case to avoid false positive on
                 -ftrivial-auto-var-init=.  */
              if (due_to_ifn_deferred_init_p (assign_stmt))
                return sval;
            }

      /* If we have an SSA name for a temporary, we don't want to print
         '<unknown>'.
         Poisoned values are shared by type, and so we can't reconstruct
         the tree other than via the def stmts, using
         fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
        src_region = get_region_for_poisoned_expr (expr);

      /* Can we reliably get the poisoned value from "expr"?
         This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
         Unfortunately, we might not have a reliable value for EXPR.
         Hence we only query its value now, and only use it if we get the
         poisoned value back again.  */
      tree check_expr = expr;
      const svalue *foo_sval = get_rvalue (expr, NULL);
      if (foo_sval == sval)
        check_expr = expr;
      else
        check_expr = NULL;
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
                                                              pkind,
                                                              src_region,
                                                              check_expr)))
        {
          /* We only want to report use of a poisoned value at the first
             place it gets used; return an unknown value to avoid generating
             a chain of followup warnings.  */
          sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
        }

      return sval;
    }

  return sval;
}

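/* An illustrative testcase (not from upstream) of the kind of code the
   POISON_KIND_UNINIT handling above fires on, assuming -fanalyzer:

     int f (void)
     {
       int i;
       return i;  // "use of uninitialized value 'i'" (CWE-457)
     }
*/
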
/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
        expr = decl;
      else
        return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
        if (0)
          sorry_at (assign->location, "unhandled assignment op: %qs",
                    get_tree_code_name (op));
        const svalue *unknown_sval
          = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
        set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
        if (TREE_CLOBBER_P (rhs1))
          {
            /* e.g. "x ={v} {CLOBBER};"  */
            clobber_region (lhs_reg);
          }
        else
          {
            /* Any CONSTRUCTOR that survives to this point is either
               just a zero-init of everything, or a vector.  */
            if (!CONSTRUCTOR_NO_CLEARING (rhs1))
              zero_fill_region (lhs_reg);
            unsigned ix;
            tree index;
            tree val;
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
              {
                gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
                if (!index)
                  index = build_int_cst (integer_type_node, ix);
                gcc_assert (TREE_CODE (index) == INTEGER_CST);
                const svalue *index_sval
                  = m_mgr->get_or_create_constant_svalue (index);
                gcc_assert (index_sval);
                const region *sub_reg
                  = m_mgr->get_element_region (lhs_reg,
                                               TREE_TYPE (val),
                                               index_sval);
                const svalue *val_sval = get_rvalue (val, ctxt);
                set_value (sub_reg, val_sval, ctxt);
              }
          }
      }
      break;

    case STRING_CST:
      {
        /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};"  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
                           ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
                           bool *out_unknown_side_effects,
                           region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
        const gassign *assign = as_a <const gassign *> (stmt);
        on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
        const gasm *asm_stmt = as_a <const gasm *> (stmt);
        on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
        /* Track whether we have a gcall to a function that's not recognized
           by anything, for which we don't have a function body, or for which
           we don't know the fndecl.  */
        const gcall *call = as_a <const gcall *> (stmt);
        *out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
        const greturn *return_ = as_a <const greturn *> (stmt);
        on_return (return_, ctxt);
      }
      break;
    }
}

/* Given a call CD with function attribute FORMAT_ATTR, check that the
   format arg to the call is a valid null-terminated string.  */

void
region_model::check_call_format_attr (const call_details &cd,
                                      tree format_attr) const
{
  /* We assume that FORMAT_ATTR has already been validated.  */

  /* arg0 of the attribute should be the kind of format string
     that this function expects (e.g. "printf").  */
  const tree arg0_tree_list = TREE_VALUE (format_attr);
  if (!arg0_tree_list)
    return;

  /* arg1 of the attribute should be the 1-based parameter index
     to treat as the format string.  */
  const tree arg1_tree_list = TREE_CHAIN (arg0_tree_list);
  if (!arg1_tree_list)
    return;
  const tree arg1_value = TREE_VALUE (arg1_tree_list);
  if (!arg1_value)
    return;

  unsigned format_arg_idx = TREE_INT_CST_LOW (arg1_value) - 1;
  if (cd.num_args () <= format_arg_idx)
    return;

  /* Subclass of annotating_context that
     adds a note about the format attr to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
                     unsigned fmt_param_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_fmt_param_idx (fmt_param_idx)
    {
    }
    void add_annotations () final override
    {
      class reason_format_attr
        : public pending_note_subclass<reason_format_attr>
      {
      public:
        reason_format_attr (const call_arg_details &arg_details)
        : m_arg_details (arg_details)
        {
        }

        const char *get_kind () const final override
        {
          return "reason_format_attr";
        }

        void emit () const final override
        {
          inform (DECL_SOURCE_LOCATION (m_arg_details.m_called_fndecl),
                  "parameter %i of %qD marked as a format string"
                  " via %qs attribute",
                  m_arg_details.m_arg_idx + 1, m_arg_details.m_called_fndecl,
                  "format");
        }

        bool operator== (const reason_format_attr &other) const
        {
          return m_arg_details == other.m_arg_details;
        }

      private:
        call_arg_details m_arg_details;
      };

      call_arg_details arg_details (m_cd, m_fmt_param_idx);
      add_note (make_unique<reason_format_attr> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_fmt_param_idx;
  };

  annotating_ctxt my_ctxt (cd, format_arg_idx);
  call_details my_cd (cd, &my_ctxt);
  my_cd.check_for_null_terminated_string_arg (format_arg_idx);
}

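/* An illustrative declaration (not from upstream) of the shape this
   check handles:

     extern void log_msg (int level, const char *fmt, ...)
       __attribute__ ((format (printf, 2, 3)));

   Here arg1 of the format attribute is 2, so format_arg_idx is 1, and
   the second argument at each call site is checked for being a valid
   null-terminated string.  */
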
/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.

   Check that calls to functions with "format" attribute have valid
   null-terminated strings for their format argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);

  /* Handle attribute "format".  */
  if (tree format_attr = cd.lookup_function_attribute ("format"))
    check_call_format_attr (cd, format_attr);
}

/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
                                         int retval,
                                         bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
                                      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.
   Specifically, assign an svalue to the LHS, and add a constraint that
   that svalue is non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  cd.set_any_lhs_with_defaults ();
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}

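/* An illustrative sketch (not from upstream) of how the helpers above
   are typically used when modelling a call with distinct outcomes,
   e.g. a function returning 0 on success and -1 on failure, where
   "cd" is the call_details for the call being modelled:

     // success outcome:
     model->update_for_zero_return (cd, true);
     // failure outcome (in a separate copy of the model):
     model->update_for_int_cst_return (cd, -1, true);

   Passing true for UNMERGEABLE keeps the two outcomes from being
   merged back into a single state.  */
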
/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
                            region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
        {
          return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
          /* TODO: we might want to also capture the constraint
             when recording the diagnostic, or note that we're using
             the upper bound.  */
        }
  return NULL;
}

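/* An illustrative sketch (not from upstream) of the kernel-style
   pattern this targets:

     n = min_t(unsigned long, len, sizeof (buf));
     copy_to_user (dst, src, n);

   Here the size becomes a MIN_EXPR whose second argument is the
   constant sizeof (buf); maybe_simplify_upper_bound extracts that
   constant as the upper bound on the copy.  */
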
/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
                                     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
        = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}

/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false; /* No side effects.  */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  if (gimple_call_internal_p (call))
    if (const known_function *kf
          = get_known_function (gimple_call_internal_fn (call)))
      {
        kf->impl_call_pre (cd);
        return false; /* No further side effects.  */
      }

  if (!callee_fndecl)
    {
      cd.set_any_lhs_with_defaults ();
      return true; /* Unknown side effects.  */
    }

  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects.  */
    }

  cd.set_any_lhs_with_defaults ();

  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects.  */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  if (!fndecl_has_gimple_body_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  return false; /* No side effects.  */
}

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
                            bool unknown_side_effects,
                            region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
        {
          kf->impl_call_post (cd);
          return;
        }
      /* Was this fndecl referenced by
         __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
        {
          impl_deallocation_call (cd);
          return;
        }
    }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}

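/* An illustrative declaration (not from upstream) of the attribute
   shape that routes a call through impl_deallocation_call above:

     void my_free (void *p);
     void *my_alloc (size_t n) __attribute__ ((malloc (my_free)));

   Referencing my_free via the malloc attribute marks it with the
   internal "*dealloc" attribute, so calls to it are modelled as
   deallocations of my_alloc's results.  */
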
/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
                                     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}

/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
            "parameter %i of %qD marked with attribute %qs",
            m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
            && m_ptr_argno == other.m_ptr_argno
            && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};

/* Check CALL, a call to external function CALLEE_FNDECL, based on
   any __attribute__ ((access, ...)) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */

void
region_model::
check_external_function_for_access_attr (const gcall *call,
                                         tree callee_fndecl,
                                         region_model_context *ctxt) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
        continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
        continue;

      if (access->mode == access_write_only
          || access->mode == access_read_write)
        {
          /* Subclass of annotating_context that
             adds a note about the attr access to any saved diagnostics.  */
          class annotating_ctxt : public annotating_context
          {
          public:
            annotating_ctxt (tree callee_fndecl,
                             const attr_access &access,
                             region_model_context *ctxt)
            : annotating_context (ctxt),
              m_callee_fndecl (callee_fndecl),
              m_access (access)
            {
            }
            void add_annotations () final override
            {
              add_note (make_unique<reason_attr_access>
                        (m_callee_fndecl, m_access));
            }
          private:
            tree m_callee_fndecl;
            const attr_access &m_access;
          };

          /* Use this ctxt below so that any diagnostics get the
             note added to them.  */
          annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

          tree ptr_tree = gimple_call_arg (call, access->ptrarg);
          const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
          const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
          check_region_for_write (reg, nullptr, &my_ctxt);
          /* We don't use the size arg for now.  */
        }
    }
}
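
/* As a hypothetical illustration (not from the GCC sources), given a
   declaration such as:

     void fill_buf (char *dst, size_t n)
       __attribute__ ((access (write_only, 1, 2)));

   a call "fill_buf (p, 10)" leads to check_region_for_write on the
   region that "p" points to, so that e.g. passing a pointer to a
   string literal is diagnosed at the call site, with a
   reason_attr_access note identifying the attribute.  */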

/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
                                        region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_external_function_for_access_attr (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
                              &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
        /* Track expected param type, where available.  */
        tree param_type = NULL_TREE;
        if (iter_param_types)
          {
            param_type = TREE_VALUE (iter_param_types);
            gcc_assert (param_type);
            iter_param_types = TREE_CHAIN (iter_param_types);
          }

        tree parm = gimple_call_arg (call, arg_idx);
        const svalue *parm_sval = get_rvalue (parm, ctxt);
        reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
         = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
        ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
         = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
        ctxt->on_unknown_change (sval, true);
      if (uncertainty)
        uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
                             conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
         iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
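
/* As a hypothetical illustration (not from the GCC sources):

     int g;
     void test (int *p)
     {
       *p = 42;
       unknown_fn (p);
     }

   after the call to "unknown_fn", both the region pointed to by "p"
   and the cluster for the global "g" must be treated as having
   unknown values, since the callee could have written to either;
   any sm-state bound to their old values is purged.  */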

/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
                                     const svalue *extra_sval,
                                     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
                            &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
           = uncertainty->begin_maybe_bound_svals ();
         iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
        if (parent->get_kind () == RK_FRAME)
          reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
         = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}

/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics.  */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  if (lhs && rhs)
    {
      const svalue *sval = get_rvalue (rhs, ctxt);
      const region *ret_reg = get_lvalue (lhs, ctxt);
      set_value (ret_reg, sval, ctxt);
    }
}

/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/siglongjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
                         region_model_context *ctxt)
{
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
                                        ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
        = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
        = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
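
/* As a hypothetical illustration (not from the GCC sources):

     jmp_buf env;
     int i = setjmp (env);

   is modelled by binding a setjmp_svalue (recording ENODE and CALL)
   into the region for "env", and by setting "i" to 0 for this initial
   direct return; the nonzero "second" return is modelled separately,
   by on_longjmp below.  */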

/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
                          int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt, false);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
        = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
        {
          const svalue *one_sval
            = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
          fake_retval_sval = one_sval;
        }
      else
        {
          /* Otherwise note that the value is nonzero.  */
          m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
        }

      /* Decorate the return value from setjmp as being unmergeable,
         so that we don't attempt to merge states with it as zero
         with states in which it's nonzero, leading to a clean distinction
         in the exploded_graph between the first return and the second
         return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
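
/* Note that "longjmp (env, 0)" is modelled as if it were
   "longjmp (env, 1)": C requires that setjmp never returns 0 from a
   longjmp, hence the special-casing of a known-zero val above; for
   any other val we merely record the constraint that it is
   nonzero.  */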

/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Get state from OLD_STATE so that all of the phi stmts for a basic block
   are effectively handled simultaneously.  */

void
region_model::handle_phi (const gphi *phi,
                          tree lhs, tree rhs,
                          const region_model &old_state,
                          region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
        return;

  const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
  const region *dst_reg = old_state.get_lvalue (lhs, ctxt);

  set_value (dst_reg, src_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
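
/* Evaluating the RHS in OLD_STATE matters for swap idioms such as
   (a hypothetical illustration, not from the GCC sources):

     # x_5 = PHI <y_2(3)>
     # y_6 = PHI <x_1(3)>

   where evaluating the second phi's RHS against a partially-updated
   state would wrongly pick up the first phi's effect.  */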

/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
                                                         dump_location_t ());

    case ARRAY_REF:
      {
        tree array = TREE_OPERAND (expr, 0);
        tree index = TREE_OPERAND (expr, 1);

        const region *array_reg = get_lvalue (array, ctxt);
        const svalue *index_sval = get_rvalue (index, ctxt);
        return m_mgr->get_element_region (array_reg,
                                          TREE_TYPE (TREE_TYPE (array)),
                                          index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
        tree inner_expr = TREE_OPERAND (expr, 0);
        const region *inner_reg = get_lvalue (inner_expr, ctxt);
        tree num_bits = TREE_OPERAND (expr, 1);
        tree first_bit_offset = TREE_OPERAND (expr, 2);
        gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
        gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
        bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
                        TREE_INT_CST_LOW (num_bits));
        return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
        tree ptr = TREE_OPERAND (expr, 0);
        tree offset = TREE_OPERAND (expr, 1);
        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
        return m_mgr->get_offset_region (star_ptr,
                                         TREE_TYPE (expr),
                                         offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
        return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
        gcc_assert (TREE_CODE (expr) == SSA_NAME
                    || TREE_CODE (expr) == PARM_DECL
                    || VAR_P (expr)
                    || TREE_CODE (expr) == RESULT_DECL);

        int stack_index = pv.m_stack_depth;
        const frame_region *frame = get_frame_at_index (stack_index);
        gcc_assert (frame);
        return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
        /* obj.field  */
        tree obj = TREE_OPERAND (expr, 0);
        tree field = TREE_OPERAND (expr, 1);
        const region *obj_reg = get_lvalue (obj, ctxt);
        return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
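
/* As a hypothetical illustration (not from the GCC sources), given:

     struct s { int arr[10]; } g;

   the lvalue "g.arr[i]" is handled recursively: the COMPONENT_REF
   yields a field_region for "arr" within the decl_region for "g",
   and the ARRAY_REF yields an element_region within that
   field_region, keyed by the svalue for "i".  */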

/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op.  */

static void
assert_compat_types (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    {
#if CHECKING_P
      if (!(useless_type_conversion_p (src_type, dst_type)))
        internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
#endif
    }
}

/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op.  */

bool
compat_types_p (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    if (!(useless_type_conversion_p (src_type, dst_type)))
      return false;
  return true;
}

/* Get the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const region *result_reg = get_lvalue_1 (pv, ctxt);
  assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
  return result_reg;
}

/* Get the region for EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const region *
region_model::get_lvalue (tree expr, region_model_context *ctxt) const
{
  return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
        /* "&EXPR".  */
        tree expr = pv.m_tree;
        tree op0 = TREE_OPERAND (expr, 0);
        const region *expr_reg = get_lvalue (op0, ctxt);
        return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
        tree expr = pv.m_tree;
        tree op0 = TREE_OPERAND (expr, 0);
        const region *reg = get_lvalue (op0, ctxt);
        tree num_bits = TREE_OPERAND (expr, 1);
        tree first_bit_offset = TREE_OPERAND (expr, 2);
        gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
        gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
        bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
                        TREE_INT_CST_LOW (num_bits));
        return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case VAR_DECL:
      if (DECL_HARD_REGISTER (pv.m_tree))
        {
          /* If it has a hard register, it doesn't have a memory region
             and can't be referred to as an lvalue.  */
          return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
        }
      /* Fall through.  */
    case PARM_DECL:
    case SSA_NAME:
    case RESULT_DECL:
    case ARRAY_REF:
      {
        const region *reg = get_lvalue (pv, ctxt);
        return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        tree expr = pv.m_tree;
        tree arg = TREE_OPERAND (expr, 0);
        const svalue *arg_sval = get_rvalue (arg, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
                                          arg_sval);
        return sval_unaryop;
      }

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
        tree expr = pv.m_tree;
        tree ptr = TREE_OPERAND (expr, 0);
        tree offset = TREE_OPERAND (expr, 1);
        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
        tree expr = pv.m_tree;
        tree arg0 = TREE_OPERAND (expr, 0);
        tree arg1 = TREE_OPERAND (expr, 1);
        const svalue *arg0_sval = get_rvalue (arg0, ctxt);
        const svalue *arg1_sval = get_rvalue (arg1, ctxt);
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
                                        arg0_sval, arg1_sval);
        return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
        const region *ref_reg = get_lvalue (pv, ctxt);
        return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
        tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
        return get_rvalue (expr, ctxt);
      }
    }
}

/* Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const svalue *result_sval = get_rvalue_1 (pv, ctxt);

  assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));

  result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);

  return result_sval;
}

/* Get the value of EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const svalue *
region_model::get_rvalue (tree expr, region_model_context *ctxt) const
{
  return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Return true if this model is on a path with "main" as the entrypoint
   (as opposed to one in which we're merely analyzing a subset of the
   path through the code).  */

bool
region_model::called_from_main_p () const
{
  if (!m_current_frame)
    return false;
  /* Determine if the oldest stack frame in this model is for "main".  */
  const frame_region *frame0 = get_frame_at_index (0);
  gcc_assert (frame0);
  return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
}

/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    return reg->get_initial_value_at_main (m_mgr);

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
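
/* As a hypothetical illustration (not from the GCC sources), given:

     int g = 42;

   a read of an untouched "g" yields the constant 42 when the
   analysis path starts at "main" (or when the decl is read-only),
   whereas on an arbitrary path we must use INIT_VAL(g), since some
   unseen caller could already have modified a non-const global.  */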

/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_store_value (const region *reg,
                               region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  bool check_poisoned = true;
  if (check_region_for_read (reg, ctxt))
    check_poisoned = false;

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
        sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
          = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
            = reg->get_parent_region ()->dyn_cast_string_region ())
        {
          tree string_cst = str_reg->get_string_cst ();
          if (const svalue *char_sval
                = m_mgr->maybe_get_char_from_string_cst (string_cst,
                                                         byte_offset_cst))
            return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
        }

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
          = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
        tree string_cst = str_reg->get_string_cst ();
        tree byte_offset_cst = build_int_cst (integer_type_node, 0);
        if (const svalue *char_sval
              = m_mgr->maybe_get_char_from_string_cst (string_cst,
                                                       byte_offset_cst))
          return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg, check_poisoned);
}

/* Return false if REG does not exist, true if it may do.
   This is for detecting regions within the stack that don't exist anymore
   after frames are popped.  */

bool
region_model::region_exists_p (const region *reg) const
{
  /* If within a stack frame, check that the stack frame is live.  */
  if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
    {
      /* Check that the current frame is the enclosing frame, or is called
         by it.  */
      for (const frame_region *iter_frame = get_current_frame (); iter_frame;
           iter_frame = iter_frame->get_calling_frame ())
        if (iter_frame == enclosing_frame)
          return true;
      return false;
    }

  return true;
}

/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
                            region_model_context *ctxt,
                            bool add_nonnull_constraint) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  if (add_nonnull_constraint)
    {
      tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
      const svalue *null_ptr
        = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
      m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
    }

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
        const region_svalue *region_sval
          = as_a <const region_svalue *> (ptr_sval);
        return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
        const binop_svalue *binop_sval
          = as_a <const binop_svalue *> (ptr_sval);
        switch (binop_sval->get_op ())
          {
          case POINTER_PLUS_EXPR:
            {
              /* If we have a symbolic value expressing pointer arithmetic,
                 try to convert it to a suitable region.  */
              const region *parent_region
                = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
              const svalue *offset = binop_sval->get_arg1 ();
              tree type = TREE_TYPE (ptr_sval->get_type ());
              return m_mgr->get_offset_region (parent_region, type, offset);
            }
          default:
            break;
          }
      }
      break;

    case SK_POISONED:
      {
        if (ctxt)
          {
            tree ptr = get_representative_tree (ptr_sval);
            /* If we can't get a representative tree for PTR_SVAL
               (e.g. if it hasn't been bound into the store), then
               fall back on PTR_TREE, if non-NULL.  */
            if (!ptr)
              ptr = ptr_tree;
            if (ptr)
              {
                const poisoned_svalue *poisoned_sval
                  = as_a <const poisoned_svalue *> (ptr_sval);
                enum poison_kind pkind = poisoned_sval->get_poison_kind ();
                ctxt->warn (::make_unique<poisoned_value_diagnostic>
                              (ptr, pkind, nullptr, nullptr));
              }
          }
      }
      break;
    }

  return m_mgr->get_symbolic_region (ptr_sval);
}
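
/* As a hypothetical illustration (not from the GCC sources),
   dereferencing the svalue for "p + i" (a POINTER_PLUS_EXPR
   binop_svalue) yields an offset_region at symbolic offset "i"
   within the region that "p" points to, whereas dereferencing a
   value of unknown provenance falls back to a symbolic_region keyed
   by that svalue.  */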

/* Attempt to get BITS within any value of REG, as TYPE.
   In particular, extract values from compound_svalues for the case
   where there's a concrete binding at BITS.
   Return an unknown svalue if we can't handle the given case.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_rvalue_for_bits (tree type,
                                   const region *reg,
                                   const bit_range &bits,
                                   region_model_context *ctxt) const
{
  const svalue *sval = get_store_value (reg, ctxt);
  return m_mgr->get_or_create_bits_within (type, bits, sval);
}

/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory.  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
            && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    auto_diagnostic_group d;
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
        warned = warning_at (rich_loc, get_controlling_option (),
                             "write to %<const%> object %qE", m_decl);
        break;
      case RK_FUNCTION:
        warned = warning_at (rich_loc, get_controlling_option (),
                             "write to function %qE", m_decl);
        break;
      case RK_LABEL:
        warned = warning_at (rich_loc, get_controlling_option (),
                             "write to label %qE", m_decl);
        break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
        return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
        return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
        return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;
  tree m_decl;
};

/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;
};

/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */

void
region_model::check_for_writable_region (const region* dest_reg,
                                         region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
        const function_region *func_reg = as_a <const function_region *> (base_reg);
        tree fndecl = func_reg->get_fndecl ();
        ctxt->warn (make_unique<write_to_const_diagnostic>
                      (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
        const label_region *label_reg = as_a <const label_region *> (base_reg);
        tree label = label_reg->get_label ();
        ctxt->warn (make_unique<write_to_const_diagnostic>
                      (label_reg, label));
      }
      break;
    case RK_DECL:
      {
        const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
        tree decl = decl_reg->get_decl ();
        /* Warn about writes to const globals.
           Don't warn for writes to const locals, and params in particular,
           since we would warn in push_frame when setting them up (e.g the
           "this" param is "T* const").  */
        if (TREE_READONLY (decl)
            && is_global_var (decl))
          ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
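
/* As a hypothetical illustration (not from the GCC sources), given:

     const int c = 42;
     char *s = (char *)"literal";

   a write through "&c" is reported via -Wanalyzer-write-to-const,
   and "s[0] = 'L';" is reported via
   -Wanalyzer-write-to-string-literal, since the base region of the
   write is the string_region for the literal.  */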

/* Get the capacity of REG in bytes.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
        const decl_region *decl_reg = as_a <const decl_region *> (reg);
        tree decl = decl_reg->get_decl ();
        if (TREE_CODE (decl) == SSA_NAME)
          {
            tree type = TREE_TYPE (decl);
            tree size = TYPE_SIZE (type);
            return get_rvalue (size, NULL);
          }
        else
          {
            tree size = decl_init_size (decl, false);
            if (size)
              return get_rvalue (size, NULL);
          }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
         of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    case RK_STRING:
      {
        /* "Capacity" here means "size".  */
        const string_region *string_reg = as_a <const string_region *> (reg);
        tree string_cst = string_reg->get_string_cst ();
        return m_mgr->get_or_create_int_cst (size_type_node,
                                             TREE_STRING_LENGTH (string_cst));
      }
      break;
    }

  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}

/* Return the string size, including the 0-terminator, if SVAL is a
   constant_svalue holding a string.  Otherwise, return an unknown_svalue.  */

const svalue *
region_model::get_string_size (const svalue *sval) const
{
  tree cst = sval->maybe_get_constant ();
  if (!cst || TREE_CODE (cst) != STRING_CST)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}

/* Return the string size, including the 0-terminator, if REG is a
   string_region.  Otherwise, return an unknown_svalue.  */

const svalue *
region_model::get_string_size (const region *reg) const
{
  const string_region *str_reg = dyn_cast <const string_region *> (reg);
  if (!str_reg)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  tree cst = str_reg->get_string_cst ();
  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}

/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.
   Return TRUE if an OOB access was detected.
   If SVAL_HINT is non-NULL, use it as a hint in diagnostics
   about the value that would be written to REG.  */

bool
region_model::check_region_access (const region *reg,
                                   enum access_direction dir,
                                   const svalue *sval_hint,
                                   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return false;

  bool oob_access_detected = false;
  check_region_for_taint (reg, dir, ctxt);
  if (!check_region_bounds (reg, dir, sval_hint, ctxt))
    oob_access_detected = true;

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
  return oob_access_detected;
}

/* If CTXT is non-NULL, use it to warn about any problems writing to REG.  */

void
region_model::check_region_for_write (const region *dest_reg,
                                      const svalue *sval_hint,
                                      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, sval_hint, ctxt);
}

/* If CTXT is non-NULL, use it to warn about any problems reading from REG.
   Returns TRUE if an OOB read was detected.  */

bool
region_model::check_region_for_read (const region *src_reg,
                                     region_model_context *ctxt) const
{
  return check_region_access (src_reg, DIR_READ, NULL, ctxt);
}

/* A pending_diagnostic subclass for casts of pointers to allocations
   whose capacity leaves trailing bytes (-Wanalyzer-allocation-size).  */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  dubious_allocation_size (const region *lhs, const region *rhs,
                           const gimple *stmt)
  : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE), m_stmt (stmt),
    m_has_allocation_event (false)
  {}

  dubious_allocation_size (const region *lhs, const region *rhs,
                           tree expr, const gimple *stmt)
  : m_lhs (lhs), m_rhs (rhs), m_expr (expr), m_stmt (stmt),
    m_has_allocation_event (false)
  {}

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return (m_stmt == other.m_stmt
            && pending_diagnostic::same_tree_p (m_expr, other.m_expr));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (rich_location *rich_loc, logger *) final override
  {
    diagnostic_metadata m;
    m.add_cwe (131);

    return warning_meta (rich_loc, m, get_controlling_option (),
                         "allocated buffer size is not a multiple"
                         " of the pointee's size");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
                                 " %<sizeof (%T)%> is %qE",
                                 m_lhs->get_type (), pointee_type,
                                 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
        if (TREE_CODE (m_expr) == INTEGER_CST)
          return ev.formatted_print ("allocated %E bytes and assigned to"
                                     " %qT here; %<sizeof (%T)%> is %qE",
                                     m_expr, m_lhs->get_type (), pointee_type,
                                     size_in_bytes (pointee_type));
        else
          return ev.formatted_print ("allocated %qE bytes and assigned to"
                                     " %qT here; %<sizeof (%T)%> is %qE",
                                     m_expr, m_lhs->get_type (), pointee_type,
                                     size_in_bytes (pointee_type));
      }

    return ev.formatted_print ("allocated and assigned to %qT here;"
                               " %<sizeof (%T)%> is %qE",
                               m_lhs->get_type (), pointee_type,
                               size_in_bytes (pointee_type));
  }

  void
  add_region_creation_events (const region *,
                              tree capacity,
                              const event_loc_info &loc_info,
                              checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

private:
  const region *m_lhs;
  const region *m_rhs;
  const tree m_expr;
  const gimple *m_stmt;
  bool m_has_allocation_event;
};

/* Return true if a constant allocation of CST bytes is compatible
   with a pointee of size POINTEE_SIZE_TREE, i.e. return false for
   dubious constant allocation sizes.  */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree,
                               bool is_struct)
{
  gcc_assert (TREE_CODE (cst) == INTEGER_CST);
  gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);

  unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
  unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);

  if (is_struct)
    return alloc_size == 0 || alloc_size >= pointee_size;
  return alloc_size % pointee_size == 0;
}

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}

/* Checks whether SVAL could be a multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that is not a multiple of SIZE_CST to the RESULT_SET.  */

class size_visitor : public visitor
{
public:
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  bool is_dubious_capacity ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    if (CONVERT_EXPR_CODE_P (sval->get_op ())
        && result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    switch (sval->get_op ())
      {
      case MULT_EXPR:
        if (result_set.contains (arg0) && result_set.contains (arg1))
          result_set.add (sval);
        break;
      case PLUS_EXPR:
      case MINUS_EXPR:
        if (result_set.contains (arg0) || result_set.contains (arg1))
          result_set.add (sval);
        break;
      default:
        break;
      }
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) || result_set.contains (iter))
      result_set.add (sval);
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
        if (tree cst = id.get_obj (*m_cm).get_any_constant ())
          check_constant (cst, sval);
      }
    else if (!m_cm->sval_constrained_p (sval))
      {
        result_set.add (sval);
      }
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      if (tree cst = id.get_obj (*m_cm).get_any_constant ())
        check_constant (cst, sval);
  }

private:
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
        /* Assume all unhandled operands are compatible.  */
        break;
      case INTEGER_CST:
        if (!capacity_compatible_with_type (cst, m_size_cst))
          result_set.add (sval);
        break;
      }
  }

  tree m_size_cst;
  const svalue *m_root_sval;
  constraint_manager *m_cm;
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};
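
/* As a hypothetical illustration (not from the GCC sources), with a
   SIZE_CST of 4: the capacity "n * 4" is not flagged (a MULT_EXPR is
   only flagged if both operands are), whereas "n * 4 + 2" is, since
   2 is not a multiple of 4 and a PLUS_EXPR is flagged if either
   operand is.  */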

/* Return true if a struct or union either uses the inheritance pattern,
   where the first field is a base struct, or the flexible array member
   pattern, where the last field is an array without a specified size.  */

static bool
struct_or_union_with_inheritance_p (tree struc)
{
  tree iter = TYPE_FIELDS (struc);
  if (iter == NULL_TREE)
    return false;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
    return true;

  tree last_field;
  while (iter != NULL_TREE)
    {
      last_field = iter;
      iter = DECL_CHAIN (iter);
    }

  if (last_field != NULL_TREE
      && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
    return true;

  return false;
}
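
/* As a hypothetical illustration (not from the GCC sources), both of:

     struct derived { struct base b; int extra; };
     struct flex { size_t len; char data[]; };

   return true: the former starts with a base struct ("inheritance"
   in C), the latter ends with a flexible array member; in both
   cases allocating more than sizeof the struct is legitimate, so
   the allocation-size check bails out for them.  */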

/* Return true if the lhs and rhs of an assignment have different types.  */

static bool
is_any_cast_p (const gimple *stmt)
{
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    return gimple_assign_cast_p (assign)
           || !pending_diagnostic::same_tree_p (
                  TREE_TYPE (gimple_assign_lhs (assign)),
                  TREE_TYPE (gimple_assign_rhs1 (assign)));
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
                                    TREE_TYPE (gimple_call_lhs (call)),
                                    gimple_call_return_type (call));
    }

  return false;
}

/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
                                 region_model_context *ctxt) const
{
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = deref_rvalue (rhs_sval, NULL_TREE, ctxt, false);
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
        const constant_svalue *cst_cap_sval
          = as_a <const constant_svalue *> (capacity);
        tree cst_cap = cst_cap_sval->get_constant ();
        if (TREE_CODE (cst_cap) == INTEGER_CST
            && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
                                               is_struct))
          ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
                                                             cst_cap,
                                                             ctxt->get_stmt ()));
      }
      break;
    default:
      {
        if (!is_struct)
          {
            size_visitor v (pointee_size_tree, capacity, m_constraints);
            if (v.is_dubious_capacity ())
              {
                tree expr = get_representative_tree (capacity);
                ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
                                                                   rhs_reg,
                                                                   expr,
                                                                   ctxt->get_stmt ()));
              }
          }
        break;
      }
    }
}
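
/* As a hypothetical illustration (not from the GCC sources):

     int *p = (int *)malloc (11);

   is flagged by -Wanalyzer-allocation-size on typical targets where
   sizeof (int) == 4, since 11 is not a multiple of 4, whereas
   "malloc (n * sizeof (int))" is accepted.  */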

/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
                         region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Setting the value of an empty region is a no-op.  */
  if (lhs_reg->empty_p ())
    return;

  check_region_size (lhs_reg, rhs_sval, ctxt);

  check_region_for_write (lhs_reg, rhs_sval, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
                     ctxt ? ctxt->get_uncertainty () : NULL);
}

/* Set the value of the region given by LHS to the value given by RHS.  */

void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}

/* Issue a note specifying that a particular function parameter is expected
   to be a valid null-terminated string.  */

static void
inform_about_expected_null_terminated_string_arg (const call_arg_details &ad)
{
  // TODO: ideally we'd underline the param here
  inform (DECL_SOURCE_LOCATION (ad.m_called_fndecl),
          "argument %d of %qD must be a pointer to a null-terminated string",
          ad.m_arg_idx + 1, ad.m_called_fndecl);
}
3283
fe97f09a 3284/* A binding of a specific svalue at a concrete byte range. */
325f9e88 3285
fe97f09a 3286struct fragment
325f9e88 3287{
fe97f09a
DM
3288 fragment ()
3289 : m_byte_range (0, 0), m_sval (nullptr)
325f9e88 3290 {
325f9e88
DM
3291 }
3292
fe97f09a
DM
3293 fragment (const byte_range &bytes, const svalue *sval)
3294 : m_byte_range (bytes), m_sval (sval)
325f9e88 3295 {
325f9e88
DM
3296 }
3297
fe97f09a 3298 static int cmp_ptrs (const void *p1, const void *p2)
325f9e88 3299 {
fe97f09a
DM
3300 const fragment *f1 = (const fragment *)p1;
3301 const fragment *f2 = (const fragment *)p2;
3302 return byte_range::cmp (f1->m_byte_range, f2->m_byte_range);
325f9e88
DM
3303 }
3304
fe97f09a
DM
3305 /* Determine if there is a zero terminator somewhere in the
3306 bytes of this fragment, starting at START_READ_OFFSET (which
3307 is absolute to the start of the cluster as a whole), and stopping
3308 at the end of this fragment.
3309
3310 Return a tristate:
3311 - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
3312 the number of bytes from that would be read, including the zero byte.
3313 - false if there definitely isn't a zero byte
3314 - unknown if we don't know. */
3315 tristate has_null_terminator (byte_offset_t start_read_offset,
3316 byte_offset_t *out_bytes_read) const
325f9e88 3317 {
fe97f09a
DM
3318 byte_offset_t rel_start_read_offset
3319 = start_read_offset - m_byte_range.get_start_byte_offset ();
3320 gcc_assert (rel_start_read_offset >= 0);
3321 byte_offset_t available_bytes
3322 = (m_byte_range.get_next_byte_offset () - start_read_offset);
3323 gcc_assert (available_bytes >= 0);
3324
3325 if (rel_start_read_offset > INT_MAX)
3326 return tristate::TS_UNKNOWN;
3327 HOST_WIDE_INT rel_start_read_offset_hwi = rel_start_read_offset.slow ();
3328
3329 if (available_bytes > INT_MAX)
3330 return tristate::TS_UNKNOWN;
3331 HOST_WIDE_INT available_bytes_hwi = available_bytes.slow ();
3332
3333 switch (m_sval->get_kind ())
3334 {
3335 case SK_CONSTANT:
3336 {
3337 tree cst
3338 = as_a <const constant_svalue *> (m_sval)->get_constant ();
3339 switch (TREE_CODE (cst))
3340 {
3341 case STRING_CST:
3342 {
3343 /* Look for the first 0 byte within STRING_CST
3344 from START_READ_OFFSET onwards. */
3345 const HOST_WIDE_INT num_bytes_to_search
3346 = std::min<HOST_WIDE_INT> ((TREE_STRING_LENGTH (cst)
3347 - rel_start_read_offset_hwi),
3348 available_bytes_hwi);
3349 const char *start = (TREE_STRING_POINTER (cst)
3350 + rel_start_read_offset_hwi);
3351 if (num_bytes_to_search >= 0)
3352 if (const void *p = memchr (start, 0,
3353 num_bytes_to_search))
3354 {
3355 *out_bytes_read = (const char *)p - start + 1;
3356 return tristate (true);
3357 }
3358
3359 *out_bytes_read = available_bytes;
3360 return tristate (false);
3361 }
3362 break;
3363 case INTEGER_CST:
3364 if (rel_start_read_offset_hwi == 0
3365 && integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (cst))))
3366 {
3367 /* Model accesses to the initial byte of a 1-byte
3368 INTEGER_CST. */
3369 if (zerop (cst))
3370 {
3371 *out_bytes_read = 1;
3372 return tristate (true);
3373 }
3374 else
3375 {
3376 *out_bytes_read = available_bytes;
3377 return tristate (false);
3378 }
3379 }
3380 /* Treat any other access to an INTEGER_CST as unknown. */
3381 return tristate::TS_UNKNOWN;
3382
3383 default:
3384 gcc_unreachable ();
3385 break;
3386 }
3387 }
3388 break;
3389 default:
3390 // TODO: it may be possible to handle other cases here.
3391 return tristate::TS_UNKNOWN;
3392 }
3393 }
3394
3395 byte_range m_byte_range;
3396 const svalue *m_sval;
3397};
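
/* Illustrative example (a sketch, not from the original source): for a
   fragment binding bytes 0-3 to the STRING_CST "foo" (TREE_STRING_LENGTH
   4, including the trailing NUL), a call such as
     f.has_null_terminator (0, &bytes_read)
   finds the zero byte at offset 3, writes 4 to *OUT_BYTES_READ, and
   returns tristate (true); a fragment bound to, say, an unknown svalue
   yields tristate::TS_UNKNOWN instead. */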
3398
3399/* A frozen copy of a single base region's binding_cluster within a store,
3400 optimized for traversal of the concrete parts in byte order.
3401 This only captures concrete bindings, and is an implementation detail
3402 of region_model::scan_for_null_terminator. */
3403
3404class iterable_cluster
3405{
3406public:
3407 iterable_cluster (const binding_cluster *cluster)
325f9e88 3408 {
3409 if (!cluster)
3410 return;
3411 for (auto iter : *cluster)
3412 {
3413 const binding_key *key = iter.first;
3414 const svalue *sval = iter.second;
3415
3416 if (const concrete_binding *concrete_key
3417 = key->dyn_cast_concrete_binding ())
3418 {
3419 byte_range fragment_bytes (0, 0);
3420 if (concrete_key->get_byte_range (&fragment_bytes))
3421 m_fragments.safe_push (fragment (fragment_bytes, sval));
3422 }
3423 }
3424 m_fragments.qsort (fragment::cmp_ptrs);
3425 }
3426
3427 bool
3428 get_fragment_for_byte (byte_offset_t byte, fragment *out_frag) const
325f9e88 3429 {
3430 /* TODO: binary search rather than linear. */
3431 unsigned iter_idx;
3432 for (iter_idx = 0; iter_idx < m_fragments.length (); iter_idx++)
3433 {
3434 if (m_fragments[iter_idx].m_byte_range.contains_p (byte))
3435 {
3436 *out_frag = m_fragments[iter_idx];
3437 return true;
3438 }
3439 }
3440 return false;
3441 }
3442
3443private:
fe97f09a 3444 auto_vec<fragment> m_fragments;
3445};
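
/* Usage sketch (hypothetical locals; this mirrors how
   region_model::scan_for_null_terminator below walks the concrete
   bindings in byte order):

     iterable_cluster c (m_store.get_cluster (base_reg));
     fragment f;
     byte_offset_t off = 0;
     while (c.get_fragment_for_byte (off, &f))
       {
         // ...consume f.m_sval...
         off = f.m_byte_range.get_next_byte_offset ();
       }
*/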
3446
3447/* Simulate reading the bytes at BYTES from BASE_REG.
3448   Complain to CTXT about any issues with the read, e.g. out-of-bounds. */
3449
3450const svalue *
3451region_model::get_store_bytes (const region *base_reg,
3452 const byte_range &bytes,
3453 region_model_context *ctxt) const
3454{
3455 const svalue *index_sval
3456 = m_mgr->get_or_create_int_cst (size_type_node,
3457 bytes.get_start_byte_offset ());
3458 const region *offset_reg = m_mgr->get_offset_region (base_reg,
3459 NULL_TREE,
3460 index_sval);
3461 const svalue *byte_size_sval
3462 = m_mgr->get_or_create_int_cst (size_type_node, bytes.m_size_in_bytes);
3463 const region *read_reg = m_mgr->get_sized_region (offset_reg,
3464 NULL_TREE,
3465 byte_size_sval);
3466
3467 /* Simulate reading those bytes from the store. */
3468 const svalue *sval = get_store_value (read_reg, ctxt);
3469 return sval;
3470}
3471
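/* Build a tree for the byte at BYTE_OFFSET relative to PTR_EXPR, as a
   MEM_REF of char type.  Used below to give a more precise expression
   when complaining about a poisoned byte encountered during the scan. */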
3472static tree
3473get_tree_for_byte_offset (tree ptr_expr, byte_offset_t byte_offset)
3474{
3475 gcc_assert (ptr_expr);
3476 return fold_build2 (MEM_REF,
3477 char_type_node,
3478 ptr_expr, wide_int_to_tree (size_type_node, byte_offset));
3479}
3480
3481/* Simulate a series of reads of REG until we find a 0 byte
3482 (equivalent to calling strlen).
3483
3484 Complain to CTXT and return NULL if:
3485 - the buffer pointed to isn't null-terminated
3486   - the buffer pointed to has any uninitialized bytes before any 0-terminator
3487 - any of the reads aren't within the bounds of the underlying base region
3488
3489 Otherwise, return a svalue for the number of bytes read (strlen + 1),
3490 and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
3491 representing the content of REG up to and including the terminator.
3492
3493 Algorithm
3494 =========
3495
3496 Get offset for first byte to read.
3497 Find the binding (if any) that contains it.
3498 Find the size in bits of that binding.
3499 Round to the nearest byte (which way???)
3500 Or maybe give up if we have a partial binding there.
3501 Get the svalue from the binding.
3502 Determine the strlen (if any) of that svalue.
3503 Does it have a 0-terminator within it?
3504 If so, we have a partial read up to and including that terminator
3505 Read those bytes from the store; add to the result in the correct place.
3506 Finish
3507 If not, we have a full read of that svalue
3508 Read those bytes from the store; add to the result in the correct place.
3509 Update read/write offsets
3510 Continue
3511 If unknown:
3512 Result is unknown
3513 Finish
3514*/
3515
3516const svalue *
3517region_model::scan_for_null_terminator (const region *reg,
3518 tree expr,
3519 const svalue **out_sval,
3520 region_model_context *ctxt) const
3521{
3522 store_manager *store_mgr = m_mgr->get_store_manager ();
3523
3524 region_offset offset = reg->get_offset (m_mgr);
3525 if (offset.symbolic_p ())
3526 {
3527 if (out_sval)
3528 *out_sval = m_mgr->get_or_create_unknown_svalue (NULL_TREE);
3529 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3530 }
3531 byte_offset_t src_byte_offset;
3532 if (!offset.get_concrete_byte_offset (&src_byte_offset))
3533 {
3534 if (out_sval)
3535 *out_sval = m_mgr->get_or_create_unknown_svalue (NULL_TREE);
3536 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3537 }
3538 const byte_offset_t initial_src_byte_offset = src_byte_offset;
3539 byte_offset_t dst_byte_offset = 0;
3540
3541 const region *base_reg = reg->get_base_region ();
3542
3543 if (const string_region *str_reg = base_reg->dyn_cast_string_region ())
3544 {
3545 tree string_cst = str_reg->get_string_cst ();
3546 if (const void *p = memchr (TREE_STRING_POINTER (string_cst),
3547 0,
3548 TREE_STRING_LENGTH (string_cst)))
3549 {
3550 size_t num_bytes_read
3551 = (const char *)p - TREE_STRING_POINTER (string_cst) + 1;
3552 /* Simulate the read. */
3553 byte_range bytes_to_read (0, num_bytes_read);
3554 const svalue *sval = get_store_bytes (reg, bytes_to_read, ctxt);
3555 if (out_sval)
3556 *out_sval = sval;
3557 return m_mgr->get_or_create_int_cst (size_type_node,
3558 num_bytes_read);
3559 }
3560 }
3561
3562 const binding_cluster *cluster = m_store.get_cluster (base_reg);
3563 iterable_cluster c (cluster);
3564 binding_map result;
3565
3566 while (1)
3567 {
3568 fragment f;
3569 if (c.get_fragment_for_byte (src_byte_offset, &f))
3570 {
3571 byte_offset_t fragment_bytes_read;
3572 tristate is_terminated
3573 = f.has_null_terminator (src_byte_offset, &fragment_bytes_read);
3574 if (is_terminated.is_unknown ())
3575 {
3576 if (out_sval)
3577 *out_sval = m_mgr->get_or_create_unknown_svalue (NULL_TREE);
3578 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3579 }
3580
3581 /* Simulate reading those bytes from the store. */
3582 byte_range bytes_to_read (src_byte_offset, fragment_bytes_read);
3583 const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
3584 check_for_poison (sval, expr, nullptr, ctxt);
3585
3586 if (out_sval)
3587 {
3588 byte_range bytes_to_write (dst_byte_offset, fragment_bytes_read);
3589 const binding_key *key
3590 = store_mgr->get_concrete_binding (bytes_to_write);
3591 result.put (key, sval);
3592 }
3593
3594 src_byte_offset += fragment_bytes_read;
3595 dst_byte_offset += fragment_bytes_read;
3596
3597 if (is_terminated.is_true ())
3598 {
3599 if (out_sval)
3600 *out_sval = m_mgr->get_or_create_compound_svalue (NULL_TREE,
3601 result);
3602 return m_mgr->get_or_create_int_cst (size_type_node,
3603 dst_byte_offset);
3604 }
3605 }
3606 else
3607 break;
3608 }
3609
3610 /* No binding for this base_region, or no binding at src_byte_offset
3611 (or a symbolic binding). */
3612
3613 /* TODO: the various special-cases seen in
3614 region_model::get_store_value. */
3615
3616 /* Simulate reading from this byte, then give up. */
3617 byte_range bytes_to_read (src_byte_offset, 1);
3618 const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
3619 tree byte_expr
3620 = get_tree_for_byte_offset (expr,
3621 src_byte_offset - initial_src_byte_offset);
3622 check_for_poison (sval, byte_expr, nullptr, ctxt);
3623 if (base_reg->can_have_initial_svalue_p ())
3624 {
3625 if (out_sval)
3626 *out_sval = m_mgr->get_or_create_unknown_svalue (NULL_TREE);
3627 return m_mgr->get_or_create_unknown_svalue (size_type_node);
3628 }
3629 else
3630 return nullptr;
3631}
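
/* Worked example of the above (a sketch, with hypothetical locals):

     char buf[16];
     strcpy (buf, "ab");
     size_t n = strlen (buf);

   Scanning buf starts at src_byte_offset 0.  The store has a concrete
   binding for bytes 0-2 of buf (the copied "ab" plus its terminator),
   so has_null_terminator reports 3 bytes read including the zero byte,
   and the function returns an svalue for (size_t)3 i.e. strlen + 1,
   with *OUT_SVAL (if requested) describing those three bytes. */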
3632
3633/* Check that argument ARG_IDX (0-based) to the call described by CD
3634 is a pointer to a valid null-terminated string.
3635
3636 Simulate scanning through the buffer, reading until we find a 0 byte
3637 (equivalent to calling strlen).
325f9e88 3638
3639 Complain and return NULL if:
3640 - the buffer pointed to isn't null-terminated
3641   - the buffer pointed to has any uninitialized bytes before any 0-terminator
3642 - any of the reads aren't within the bounds of the underlying base region
325f9e88 3643
3644 Otherwise, return a svalue for the number of bytes read (strlen + 1),
3645 and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
3646 representing the content of the buffer up to and including the terminator.
325f9e88 3647
3648 TODO: we should also complain if:
3649 - the pointer is NULL (or could be). */
3650
3651const svalue *
325f9e88 3652region_model::check_for_null_terminated_string_arg (const call_details &cd,
3653 unsigned arg_idx,
3654 const svalue **out_sval)
325f9e88 3655{
3656 class null_terminator_check_event : public custom_event
3657 {
3658 public:
3659 null_terminator_check_event (const event_loc_info &loc_info,
3660 const call_arg_details &arg_details)
3661 : custom_event (loc_info),
3662 m_arg_details (arg_details)
3663 {
3664 }
3665
3666 label_text get_desc (bool can_colorize) const final override
3667 {
3668 if (m_arg_details.m_arg_expr)
3669 return make_label_text (can_colorize,
3670 "while looking for null terminator"
3671 " for argument %i (%qE) of %qD...",
3672 m_arg_details.m_arg_idx + 1,
3673 m_arg_details.m_arg_expr,
3674 m_arg_details.m_called_fndecl);
3675 else
3676 return make_label_text (can_colorize,
3677 "while looking for null terminator"
3678 " for argument %i of %qD...",
3679 m_arg_details.m_arg_idx + 1,
3680 m_arg_details.m_called_fndecl);
3681 }
3682
3683 private:
3684 const call_arg_details m_arg_details;
3685 };
3686
3687 class null_terminator_check_decl_note
3688 : public pending_note_subclass<null_terminator_check_decl_note>
3689 {
3690 public:
3691 null_terminator_check_decl_note (const call_arg_details &arg_details)
3692 : m_arg_details (arg_details)
3693 {
3694 }
3695
3696 const char *get_kind () const final override
3697 {
3698 return "null_terminator_check_decl_note";
3699 }
3700
3701 void emit () const final override
3702 {
3703 inform_about_expected_null_terminated_string_arg (m_arg_details);
3704 }
3705
3706 bool operator== (const null_terminator_check_decl_note &other) const
3707 {
3708 return m_arg_details == other.m_arg_details;
3709 }
3710
3711 private:
3712 const call_arg_details m_arg_details;
3713 };
3714
3715 /* Subclass of decorated_region_model_context that
3716 adds the above event and note to any saved diagnostics. */
3717 class annotating_ctxt : public annotating_context
3718 {
3719 public:
3720 annotating_ctxt (const call_details &cd,
3721 unsigned arg_idx)
3722 : annotating_context (cd.get_ctxt ()),
3723 m_cd (cd),
3724 m_arg_idx (arg_idx)
3725 {
3726 }
3727 void add_annotations () final override
3728 {
3729 call_arg_details arg_details (m_cd, m_arg_idx);
3730 event_loc_info loc_info (m_cd.get_location (),
3731 m_cd.get_model ()->get_current_function ()->decl,
3732 m_cd.get_model ()->get_stack_depth ());
3733
3734 add_event (make_unique<null_terminator_check_event> (loc_info,
3735 arg_details));
3736 add_note (make_unique <null_terminator_check_decl_note> (arg_details));
3737 }
3738 private:
3739 const call_details &m_cd;
3740 unsigned m_arg_idx;
3741 };
3742
3743 /* Use this ctxt below so that any diagnostics that get added
3744 get annotated. */
3745 annotating_ctxt my_ctxt (cd, arg_idx);
3746
3747 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
3748 const region *buf_reg
fe97f09a 3749 = deref_rvalue (arg_sval, cd.get_arg_tree (arg_idx), &my_ctxt);
325f9e88 3750
3751 return scan_for_null_terminator (buf_reg,
3752 cd.get_arg_tree (arg_idx),
3753 out_sval,
3754 &my_ctxt);
3755}
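
/* For instance (an illustrative case, not from the testsuite):

     char buf[4];
     buf[0] = 'a';
     strlen (buf);  // bytes 1-3 of buf were never written

   Scanning argument 1 of strlen reads past the 'a' into uninitialized
   bytes, so check_for_poison complains; via annotating_ctxt the saved
   diagnostic gains the "while looking for null terminator for
   argument 1..." event and the note that the argument must be a
   pointer to a null-terminated string. */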
3756
808f4dfe 3757/* Remove all bindings overlapping REG within the store. */
3758
3759void
3760region_model::clobber_region (const region *reg)
3761{
3762  m_store.clobber_region (m_mgr->get_store_manager (), reg);
3763}
3764
3765/* Remove any bindings for REG within the store. */
3766
3767void
3768region_model::purge_region (const region *reg)
3769{
3770  m_store.purge_region (m_mgr->get_store_manager (), reg);
3771}
3772
3773/* Fill REG with SVAL. */
3774
3775void
3776region_model::fill_region (const region *reg, const svalue *sval)
3777{
3778  m_store.fill_region (m_mgr->get_store_manager (), reg, sval);
3779}
3780
3781/* Zero-fill REG. */
3782
3783void
3784region_model::zero_fill_region (const region *reg)
3785{
3786  m_store.zero_fill_region (m_mgr->get_store_manager (), reg);
3787}
3788
3789/* Mark REG as having unknown content. */
3790
3791void
3792region_model::mark_region_as_unknown (const region *reg,
3793 uncertainty_t *uncertainty)
884d9141 3794{
14f5e56a 3795 svalue_set maybe_live_values;
3a66c289 3796  m_store.mark_region_as_unknown (m_mgr->get_store_manager (), reg,
3797 uncertainty, &maybe_live_values);
3798 m_store.on_maybe_live_values (maybe_live_values);
3799}
3800
808f4dfe 3801/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
3802 this model. */
3803
3804tristate
3805region_model::eval_condition (const svalue *lhs,
3806 enum tree_code op,
3807 const svalue *rhs) const
757bf1df 3808{
3809 gcc_assert (lhs);
3810 gcc_assert (rhs);
3811
3812 /* For now, make no attempt to capture constraints on floating-point
3813 values. */
3814 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
3815 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
3816 return tristate::unknown ();
3817
3818 /* See what we know based on the values. */
3819
3820 /* Unwrap any unmergeable values. */
3821 lhs = lhs->unwrap_any_unmergeable ();
3822 rhs = rhs->unwrap_any_unmergeable ();
3823
3824 if (lhs == rhs)
757bf1df 3825 {
3826 /* If we have the same svalue, then we have equality
3827 (apart from NaN-handling).
3828 TODO: should this definitely be the case for poisoned values? */
3829 /* Poisoned and unknown values are "unknowable". */
3830 if (lhs->get_kind () == SK_POISONED
3831 || lhs->get_kind () == SK_UNKNOWN)
3832 return tristate::TS_UNKNOWN;
e978955d 3833
808f4dfe 3834 switch (op)
757bf1df 3835 {
3836 case EQ_EXPR:
3837 case GE_EXPR:
3838 case LE_EXPR:
3839 return tristate::TS_TRUE;
07c86323 3840
3841 case NE_EXPR:
3842 case GT_EXPR:
3843 case LT_EXPR:
3844 return tristate::TS_FALSE;
3845
3846 default:
3847 /* For other ops, use the logic below. */
3848 break;
757bf1df 3849 }
808f4dfe 3850 }
757bf1df 3851
3852 /* If we have a pair of region_svalues, compare them. */
3853 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3854 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3855 {
3856 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
3857 if (res.is_known ())
3858 return res;
3859 /* Otherwise, only known through constraints. */
3860 }
757bf1df 3861
808f4dfe 3862 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
3863 {
3864 /* If we have a pair of constants, compare them. */
3865 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3866 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
3867 else
3868 {
3869 /* When we have one constant, put it on the RHS. */
3870 std::swap (lhs, rhs);
3871 op = swap_tree_comparison (op);
3872 }
3873 }
3874 gcc_assert (lhs->get_kind () != SK_CONSTANT);
757bf1df 3875
3876 /* Handle comparison against zero. */
3877 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
3878 if (zerop (cst_rhs->get_constant ()))
3879 {
3880 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
3881 {
3882 /* A region_svalue is a non-NULL pointer, except in certain
3883 special cases (see the comment for region::non_null_p). */
3884 const region *pointee = ptr->get_pointee ();
3885 if (pointee->non_null_p ())
3886 {
3887 switch (op)
3888 {
3889 default:
3890 gcc_unreachable ();
3891
3892 case EQ_EXPR:
3893 case GE_EXPR:
3894 case LE_EXPR:
3895 return tristate::TS_FALSE;
3896
3897 case NE_EXPR:
3898 case GT_EXPR:
3899 case LT_EXPR:
3900 return tristate::TS_TRUE;
3901 }
3902 }
3903 }
3904 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3905 {
3906 /* Treat offsets from a non-NULL pointer as being non-NULL. This
3907 isn't strictly true, in that eventually ptr++ will wrap
3908     around and be NULL, but it won't occur in practice, and thus
3909     can be used to suppress what are effectively false positives
3910     that we shouldn't warn for. */
3911 if (binop->get_op () == POINTER_PLUS_EXPR)
3912 {
9bbcee45 3913 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
3914 if (lhs_ts.is_known ())
3915 return lhs_ts;
3916 }
3917 }
3918 else if (const unaryop_svalue *unaryop
3919 = lhs->dyn_cast_unaryop_svalue ())
3920 {
3921 if (unaryop->get_op () == NEGATE_EXPR)
3922 {
3923	      /* e.g. "-X <= 0" is equivalent to "X >= 0". */
3924 tristate lhs_ts = eval_condition (unaryop->get_arg (),
3925 swap_tree_comparison (op),
3926 rhs);
3927 if (lhs_ts.is_known ())
3928 return lhs_ts;
3929 }
3930 }
e82e0f14 3931 }
3932
3933 /* Handle rejection of equality for comparisons of the initial values of
3934 "external" values (such as params) with the address of locals. */
3935 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
3936 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
3937 {
3938 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
3939 if (res.is_known ())
3940 return res;
3941 }
3942 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
3943 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
3944 {
3945 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
3946 if (res.is_known ())
3947 return res;
3948 }
3949
3950 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
3951 if (tree rhs_cst = rhs->maybe_get_constant ())
3952 {
3953 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
3954 if (res.is_known ())
3955 return res;
3956 }
3957
7a6564c9 3958 /* Handle comparisons between two svalues with more than one operand. */
9bbcee45 3959 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
3960 {
3961 switch (op)
3962 {
3963 default:
3964 break;
3965 case EQ_EXPR:
3966 {
3967 /* TODO: binops can be equal even if they are not structurally
3968 equal in case of commutative operators. */
3969 tristate res = structural_equality (lhs, rhs);
3970 if (res.is_true ())
3971 return res;
3972 }
3973 break;
3974 case LE_EXPR:
3975 {
3976 tristate res = structural_equality (lhs, rhs);
3977 if (res.is_true ())
3978 return res;
3979 }
3980 break;
3981 case GE_EXPR:
3982 {
3983 tristate res = structural_equality (lhs, rhs);
3984 if (res.is_true ())
3985 return res;
3986 res = symbolic_greater_than (binop, rhs);
3987 if (res.is_true ())
3988 return res;
3989 }
3990 break;
3991 case GT_EXPR:
3992 {
3993 tristate res = symbolic_greater_than (binop, rhs);
3994 if (res.is_true ())
3995 return res;
3996 }
3997 break;
3998 }
3999 }
4000
4001 /* Otherwise, try constraints.
4002 Cast to const to ensure we don't change the constraint_manager as we
4003 do this (e.g. by creating equivalence classes). */
4004 const constraint_manager *constraints = m_constraints;
4005 return constraints->eval_condition (lhs, op, rhs);
4006}
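
/* A few concrete instances of the above (a sketch; svalue spellings
   abbreviated):
   - eval_condition (x, EQ_EXPR, x) is TS_TRUE via the same-svalue case
     (unless x is unknown or poisoned);
   - eval_condition (CST 3, LT_EXPR, CST 4) folds to TS_TRUE via
     constant_svalue::eval_condition;
   - eval_condition (INIT_VAL (i), GT_EXPR, CST 0) falls through to
     whatever m_constraints knows, otherwise TS_UNKNOWN. */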
4007
9bbcee45 4008/* Subroutine of region_model::eval_condition, for rejecting
4009 equality of INIT_VAL(PARM) with &LOCAL. */
4010
4011tristate
4012region_model::compare_initial_and_pointer (const initial_svalue *init,
4013 const region_svalue *ptr) const
4014{
4015 const region *pointee = ptr->get_pointee ();
4016
4017 /* If we have a pointer to something within a stack frame, it can't be the
4018 initial value of a param. */
4019 if (pointee->maybe_get_frame_region ())
4020 if (init->initial_value_of_param_p ())
4021 return tristate::TS_FALSE;
4022
4023 return tristate::TS_UNKNOWN;
4024}
4025
4026/* Return true if SVAL is definitely positive. */
4027
4028static bool
4029is_positive_svalue (const svalue *sval)
4030{
4031 if (tree cst = sval->maybe_get_constant ())
4032 return !zerop (cst) && get_range_pos_neg (cst) == 1;
4033 tree type = sval->get_type ();
4034 if (!type)
4035 return false;
4036 /* Consider a binary operation size_t + int. The analyzer wraps the int in
4037 an unaryop_svalue, converting it to a size_t, but in the dynamic execution
4038 the result is smaller than the first operand. Thus, we have to look if
4039 the argument of the unaryop_svalue is also positive. */
4040 if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
4041 return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
4042 && is_positive_svalue (un_op->get_arg ());
4043 return TYPE_UNSIGNED (type);
4044}
4045
4046/* Return true if A is definitely larger than B.
4047
4048   Limitation: does not account for integer overflows and does not try to
4049   return false, so it cannot be used in negated form. */
4050
4051tristate
4052region_model::symbolic_greater_than (const binop_svalue *bin_a,
4053 const svalue *b) const
4054{
4055 if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
4056 {
4057 /* Eliminate the right-hand side of both svalues. */
4058 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4059 if (bin_a->get_op () == bin_b->get_op ()
4060 && eval_condition (bin_a->get_arg1 (),
4061 GT_EXPR,
4062 bin_b->get_arg1 ()).is_true ()
4063 && eval_condition (bin_a->get_arg0 (),
4064 GE_EXPR,
4065 bin_b->get_arg0 ()).is_true ())
4066 return tristate (tristate::TS_TRUE);
4067
4068 /* Otherwise, try to remove a positive offset or factor from BIN_A. */
4069 if (is_positive_svalue (bin_a->get_arg1 ())
4070 && eval_condition (bin_a->get_arg0 (),
4071 GE_EXPR, b).is_true ())
4072 return tristate (tristate::TS_TRUE);
4073 }
4074 return tristate::unknown ();
4075}
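
/* Example (a sketch): for A = (i * 4) + 4 and B = (i * 4), the
   constant 4 is a positive svalue and (i * 4) >= B holds by
   referential equality, so the second test above gives TS_TRUE.
   Per the limitation above, wraparound of (i * 4) + 4 is not
   modeled, and a definite "false" is never produced. */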
4076
4077/* Return true if A and B are equal structurally.
4078
4079 Structural equality means that A and B are equal if the svalues A and B have
4080   the same nodes at the same positions in the tree and the leaves are equal.
4081   Equality for conjured_svalues and initial_svalues is determined by comparing
4082   the pointers, while constants are compared by value.  That behavior is useful
4083   to check for binop_svalues that evaluate to the same concrete value but
4084 might use one operand with a different type but the same constant value.
4085
4086 For example,
4087 binop_svalue (mult_expr,
4088		    initial_svalue ('size_t', decl_region (..., 'some_var')),
4089		    constant_svalue ('size_t', 4))
4090   and
4091     binop_svalue (mult_expr,
4092		    initial_svalue ('size_t', decl_region (..., 'some_var')),
4093		    constant_svalue ('sizetype', 4))
4094 are structurally equal. A concrete C code example, where this occurs, can
4095 be found in test7 of out-of-bounds-5.c. */
4096
4097tristate
4098region_model::structural_equality (const svalue *a, const svalue *b) const
4099{
4100 /* If A and B are referentially equal, they are also structurally equal. */
4101 if (a == b)
4102 return tristate (tristate::TS_TRUE);
4103
4104 switch (a->get_kind ())
4105 {
4106 default:
4107 return tristate::unknown ();
4108 /* SK_CONJURED and SK_INITIAL are already handled
4109 by the referential equality above. */
4110 case SK_CONSTANT:
4111 {
4112 tree a_cst = a->maybe_get_constant ();
4113 tree b_cst = b->maybe_get_constant ();
4114 if (a_cst && b_cst)
4115 return tristate (tree_int_cst_equal (a_cst, b_cst));
4116 }
4117 return tristate (tristate::TS_FALSE);
4118 case SK_UNARYOP:
4119 {
4120 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
4121 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
4122 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
4123 un_b->get_type ())
4124 && un_a->get_op () == un_b->get_op ()
4125 && structural_equality (un_a->get_arg (),
4126 un_b->get_arg ()));
4127 }
4128 return tristate (tristate::TS_FALSE);
4129 case SK_BINOP:
4130 {
4131 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
4132 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4133 return tristate (bin_a->get_op () == bin_b->get_op ()
4134 && structural_equality (bin_a->get_arg0 (),
4135 bin_b->get_arg0 ())
4136 && structural_equality (bin_a->get_arg1 (),
4137 bin_b->get_arg1 ()));
4138 }
4139 return tristate (tristate::TS_FALSE);
4140 }
4141}
4142
4143/* Handle various constraints of the form:
4144     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
4145     OP : == or !=
4146     RHS: zero
4147   and (with a cast):
4148     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
4149     OP : == or !=
4150     RHS: zero
4151   by adding constraints for INNER_LHS INNER_OP INNER_RHS.
4152
4153   Return true if this function can fully handle the constraint; if
4154   so, add the implied constraint(s) and write true to *OUT if they
4155   are consistent with existing constraints, or write false to *OUT
4156   if they contradict existing constraints.
4157
4158   Return false for cases that this function doesn't know how to handle.
4159
4160 For example, if we're checking a stored conditional, we'll have
4161 something like:
4162 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
4163 OP : NE_EXPR
4164 RHS: zero
4165 which this function can turn into an add_constraint of:
4166 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
4167
4168 Similarly, optimized && and || conditionals lead to e.g.
4169 if (p && q)
4170 becoming gimple like this:
4171 _1 = p_6 == 0B;
4172 _2 = q_8 == 0B
4173 _3 = _1 | _2
4174 On the "_3 is false" branch we can have constraints of the form:
4175 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4176 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
4177 == 0
4178 which implies that both _1 and _2 are false,
4179 which this function can turn into a pair of add_constraints of
4180 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4181 and:
4182 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
4183
4184bool
4185region_model::add_constraints_from_binop (const svalue *outer_lhs,
4186 enum tree_code outer_op,
4187 const svalue *outer_rhs,
4188 bool *out,
4189 region_model_context *ctxt)
4190{
4191 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
4192 outer_lhs = cast;
4193 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
4194 if (!binop_sval)
4195 return false;
4196 if (!outer_rhs->all_zeroes_p ())
4197 return false;
4198
4199 const svalue *inner_lhs = binop_sval->get_arg0 ();
4200 enum tree_code inner_op = binop_sval->get_op ();
4201 const svalue *inner_rhs = binop_sval->get_arg1 ();
4202
4203 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
4204 return false;
4205
4206 /* We have either
4207 - "OUTER_LHS != false" (i.e. OUTER is true), or
4208 - "OUTER_LHS == false" (i.e. OUTER is false). */
4209 bool is_true = outer_op == NE_EXPR;
4210
4211 switch (inner_op)
4212 {
4213 default:
4214 return false;
4215
4216 case EQ_EXPR:
4217 case NE_EXPR:
4218 {
4219 /* ...and "(inner_lhs OP inner_rhs) == 0"
4220 then (inner_lhs OP inner_rhs) must have the same
4221 logical value as LHS. */
4222 if (!is_true)
4223 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
4224 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
4225 return true;
4226 }
4227 break;
4228
4229 case BIT_AND_EXPR:
4230 if (is_true)
4231 {
4232 /* ...and "(inner_lhs & inner_rhs) != 0"
4233 then both inner_lhs and inner_rhs must be true. */
4234 const svalue *false_sval
4235 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4236 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
4237 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
4238 *out = sat1 && sat2;
4239 return true;
4240 }
4241 return false;
4242
4243 case BIT_IOR_EXPR:
4244 if (!is_true)
4245 {
4246 /* ...and "(inner_lhs | inner_rhs) == 0"
4247 i.e. "(inner_lhs | inner_rhs)" is false
4248 then both inner_lhs and inner_rhs must be false. */
4249 const svalue *false_sval
4250 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4251 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
4252 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
4253 *out = sat1 && sat2;
4254 return true;
4255 }
4256 return false;
4257 }
4258}
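
/* A C-level sketch of the stored-conditional case from the comment
   above (hypothetical example):

     void *p = malloc (8);
     int ok = (p != NULL);  // gimple: _1 = p_5 != 0B; ok_6 = (int)_1;
     if (ok)                // tests "CAST(int, _1) != 0"
       ...

   On the "ok is true" edge, maybe_undo_cast strips the cast, the
   EQ_EXPR/NE_EXPR case applies with is_true set, and the inner
   constraint "p != NULL" is added directly. */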
4259
4260/* Attempt to add the constraint "LHS OP RHS" to this region_model.
4261 If it is consistent with existing constraints, add it, and return true.
4262 Return false if it contradicts existing constraints.
4263 Use CTXT for reporting any diagnostics associated with the accesses. */
4264
4265bool
4266region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4267 region_model_context *ctxt)
4268{
4269 /* For now, make no attempt to capture constraints on floating-point
4270 values. */
4271 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4272 return true;
4273
4274 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
4275 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 4276
4277 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
4278}
4279
4280/* Attempt to add the constraint "LHS OP RHS" to this region_model.
4281 If it is consistent with existing constraints, add it, and return true.
4282 Return false if it contradicts existing constraints.
4283 Use CTXT for reporting any diagnostics associated with the accesses. */
4284
4285bool
4286region_model::add_constraint (const svalue *lhs,
4287 enum tree_code op,
4288 const svalue *rhs,
4289 region_model_context *ctxt)
4290{
4291 tristate t_cond = eval_condition (lhs, op, rhs);
4292
4293 /* If we already have the condition, do nothing. */
4294 if (t_cond.is_true ())
4295 return true;
4296
4297 /* Reject a constraint that would contradict existing knowledge, as
4298 unsatisfiable. */
4299 if (t_cond.is_false ())
4300 return false;
4301
4302 bool out;
4303 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
4304 return out;
757bf1df 4305
4306 /* Attempt to store the constraint. */
4307 if (!m_constraints->add_constraint (lhs, op, rhs))
4308 return false;
4309
4310 /* Notify the context, if any. This exists so that the state machines
4311 in a program_state can be notified about the condition, and so can
4312 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
4313 when synthesizing constraints as above. */
4314 if (ctxt)
4315 ctxt->on_condition (lhs, op, rhs);
4316
4317 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
4318 the case where REGION is heap-allocated and thus could be NULL). */
4319 if (tree rhs_cst = rhs->maybe_get_constant ())
4320 if (op == EQ_EXPR && zerop (rhs_cst))
4321 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
4322 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 4323
4324 return true;
4325}
4326
4327/* As above, but when returning false, if OUT is non-NULL, write a
4328 new rejected_constraint to *OUT. */
4329
4330bool
4331region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
4332 region_model_context *ctxt,
4333 rejected_constraint **out)
4334{
4335 bool sat = add_constraint (lhs, op, rhs, ctxt);
4336 if (!sat && out)
8ca7fa84 4337 *out = new rejected_op_constraint (*this, lhs, op, rhs);
4338 return sat;
4339}
4340
4341/* Determine what is known about the condition "LHS OP RHS" within
4342 this model.
4343 Use CTXT for reporting any diagnostics associated with the accesses. */
4344
4345tristate
4346region_model::eval_condition (tree lhs,
4347 enum tree_code op,
4348 tree rhs,
5c6546ca 4349 region_model_context *ctxt) const
757bf1df 4350{
4351 /* For now, make no attempt to model constraints on floating-point
4352 values. */
4353 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
4354 return tristate::unknown ();
4355
4356 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
4357}
4358
4359/* Implementation of region_model::get_representative_path_var.
4360 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4361 Use VISITED to prevent infinite mutual recursion with the overload for
4362 regions. */
757bf1df 4363
808f4dfe 4364path_var
4365region_model::get_representative_path_var_1 (const svalue *sval,
4366 svalue_set *visited) const
757bf1df 4367{
467a4820 4368 gcc_assert (sval);
757bf1df 4369
4370 /* Prevent infinite recursion. */
4371 if (visited->contains (sval))
4372 {
4373 if (sval->get_kind () == SK_CONSTANT)
4374 return path_var (sval->maybe_get_constant (), 0);
4375 else
4376 return path_var (NULL_TREE, 0);
4377 }
808f4dfe 4378 visited->add (sval);
757bf1df 4379
4380 /* Handle casts by recursion into get_representative_path_var. */
4381 if (const svalue *cast_sval = sval->maybe_undo_cast ())
4382 {
4383 path_var result = get_representative_path_var (cast_sval, visited);
4384 tree orig_type = sval->get_type ();
4385 /* If necessary, wrap the result in a cast. */
4386 if (result.m_tree && orig_type)
4387 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
4388 return result;
4389 }
4390
4391 auto_vec<path_var> pvs;
4392 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 4393
4394 if (tree cst = sval->maybe_get_constant ())
4395 pvs.safe_push (path_var (cst, 0));
757bf1df 4396
90f7c300 4397 /* Handle string literals and various other pointers. */
4398 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
4399 {
4400 const region *reg = ptr_sval->get_pointee ();
4401 if (path_var pv = get_representative_path_var (reg, visited))
4402 return path_var (build1 (ADDR_EXPR,
467a4820 4403 sval->get_type (),
808f4dfe
DM
4404 pv.m_tree),
4405 pv.m_stack_depth);
4406 }
4407
4408 /* If we have a sub_svalue, look for ways to represent the parent. */
4409 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 4410 {
4411 const svalue *parent_sval = sub_sval->get_parent ();
4412 const region *subreg = sub_sval->get_subregion ();
4413 if (path_var parent_pv
4414 = get_representative_path_var (parent_sval, visited))
4415 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
4416 return path_var (build3 (COMPONENT_REF,
4417 sval->get_type (),
4418 parent_pv.m_tree,
4419 field_reg->get_field (),
4420 NULL_TREE),
4421 parent_pv.m_stack_depth);
4422 }
4423
4424 /* Handle binops. */
4425 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
4426 if (path_var lhs_pv
4427 = get_representative_path_var (binop_sval->get_arg0 (), visited))
4428 if (path_var rhs_pv
4429 = get_representative_path_var (binop_sval->get_arg1 (), visited))
4430 return path_var (build2 (binop_sval->get_op (),
4431 sval->get_type (),
4432 lhs_pv.m_tree, rhs_pv.m_tree),
4433 lhs_pv.m_stack_depth);
4434
4435 if (pvs.length () < 1)
4436 return path_var (NULL_TREE, 0);
4437
4438 pvs.qsort (readability_comparator);
4439 return pvs[0];
4440}
4441
4442/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
4443 Use VISITED to prevent infinite mutual recursion with the overload for
4444 regions
4445
4446 This function defers to get_representative_path_var_1 to do the work;
4447 it adds verification that get_representative_path_var_1 returned a tree
4448 of the correct type. */
4449
4450path_var
4451region_model::get_representative_path_var (const svalue *sval,
4452 svalue_set *visited) const
4453{
4454 if (sval == NULL)
4455 return path_var (NULL_TREE, 0);
4456
4457 tree orig_type = sval->get_type ();
4458
4459 path_var result = get_representative_path_var_1 (sval, visited);
4460
4461 /* Verify that the result has the same type as SVAL, if any. */
4462 if (result.m_tree && orig_type)
4463 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
4464
4465 return result;
4466}
4467
4468/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
4469
4470 Strip off any top-level cast, to avoid messages like
4471 double-free of '(void *)ptr'
4472 from analyzer diagnostics. */
757bf1df 4473
4474tree
4475region_model::get_representative_tree (const svalue *sval) const
757bf1df 4476{
808f4dfe 4477 svalue_set visited;
4478 tree expr = get_representative_path_var (sval, &visited).m_tree;
4479
4480 /* Strip off any top-level cast. */
4481 if (expr && TREE_CODE (expr) == NOP_EXPR)
4482 expr = TREE_OPERAND (expr, 0);
4483
4484 return fixup_tree_for_diagnostic (expr);
4485}
4486
4487tree
4488region_model::get_representative_tree (const region *reg) const
4489{
4490 svalue_set visited;
4491 tree expr = get_representative_path_var (reg, &visited).m_tree;
4492
4493 /* Strip off any top-level cast. */
467a4820 4494 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 4495 expr = TREE_OPERAND (expr, 0);
467a4820 4496
e4bb1bd6 4497 return fixup_tree_for_diagnostic (expr);
4498}
4499
4500/* Implementation of region_model::get_representative_path_var.
4501
4502 Attempt to return a path_var that represents REG, or return
4503 the NULL path_var.
4504 For example, a region for a field of a local would be a path_var
4505 wrapping a COMPONENT_REF.
4506 Use VISITED to prevent infinite mutual recursion with the overload for
4507 svalues. */
757bf1df 4508
808f4dfe 4509path_var
4510region_model::get_representative_path_var_1 (const region *reg,
4511 svalue_set *visited) const
4512{
4513 switch (reg->get_kind ())
757bf1df 4514 {
4515 default:
4516 gcc_unreachable ();
e516294a 4517
4518 case RK_FRAME:
4519 case RK_GLOBALS:
4520 case RK_CODE:
4521 case RK_HEAP:
4522 case RK_STACK:
358dab90 4523 case RK_THREAD_LOCAL:
4524 case RK_ROOT:
4525 /* Regions that represent memory spaces are not expressible as trees. */
4526 return path_var (NULL_TREE, 0);
757bf1df 4527
808f4dfe 4528 case RK_FUNCTION:
884d9141 4529 {
4530 const function_region *function_reg
4531 = as_a <const function_region *> (reg);
4532 return path_var (function_reg->get_fndecl (), 0);
884d9141 4533 }
808f4dfe 4534 case RK_LABEL:
4535 {
4536 const label_region *label_reg = as_a <const label_region *> (reg);
4537 return path_var (label_reg->get_label (), 0);
4538 }
90f7c300 4539
4540 case RK_SYMBOLIC:
4541 {
4542 const symbolic_region *symbolic_reg
4543 = as_a <const symbolic_region *> (reg);
4544 const svalue *pointer = symbolic_reg->get_pointer ();
4545 path_var pointer_pv = get_representative_path_var (pointer, visited);
4546 if (!pointer_pv)
4547 return path_var (NULL_TREE, 0);
4548 tree offset = build_int_cst (pointer->get_type (), 0);
4549 return path_var (build2 (MEM_REF,
4550 reg->get_type (),
4551 pointer_pv.m_tree,
4552 offset),
4553 pointer_pv.m_stack_depth);
4554 }
4555 case RK_DECL:
4556 {
4557 const decl_region *decl_reg = as_a <const decl_region *> (reg);
4558 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
4559 }
4560 case RK_FIELD:
4561 {
4562 const field_region *field_reg = as_a <const field_region *> (reg);
4563 path_var parent_pv
4564 = get_representative_path_var (reg->get_parent_region (), visited);
4565 if (!parent_pv)
4566 return path_var (NULL_TREE, 0);
4567 return path_var (build3 (COMPONENT_REF,
4568 reg->get_type (),
4569 parent_pv.m_tree,
4570 field_reg->get_field (),
4571 NULL_TREE),
4572 parent_pv.m_stack_depth);
4573 }
757bf1df 4574
4575 case RK_ELEMENT:
4576 {
4577 const element_region *element_reg
4578 = as_a <const element_region *> (reg);
4579 path_var parent_pv
4580 = get_representative_path_var (reg->get_parent_region (), visited);
4581 if (!parent_pv)
4582 return path_var (NULL_TREE, 0);
4583 path_var index_pv
4584 = get_representative_path_var (element_reg->get_index (), visited);
4585 if (!index_pv)
4586 return path_var (NULL_TREE, 0);
4587 return path_var (build4 (ARRAY_REF,
4588 reg->get_type (),
4589 parent_pv.m_tree, index_pv.m_tree,
4590 NULL_TREE, NULL_TREE),
4591 parent_pv.m_stack_depth);
4592 }
757bf1df 4593
808f4dfe 4594 case RK_OFFSET:
757bf1df 4595 {
4596 const offset_region *offset_reg
4597 = as_a <const offset_region *> (reg);
4598 path_var parent_pv
4599 = get_representative_path_var (reg->get_parent_region (), visited);
4600 if (!parent_pv)
4601 return path_var (NULL_TREE, 0);
4602 path_var offset_pv
4603 = get_representative_path_var (offset_reg->get_byte_offset (),
4604 visited);
29f5db8e 4605 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 4606 return path_var (NULL_TREE, 0);
4607 tree addr_parent = build1 (ADDR_EXPR,
4608 build_pointer_type (reg->get_type ()),
4609 parent_pv.m_tree);
4610 return path_var (build2 (MEM_REF,
4611 reg->get_type (),
29f5db8e 4612 addr_parent, offset_pv.m_tree),
808f4dfe 4613 parent_pv.m_stack_depth);
757bf1df 4614 }
757bf1df 4615
4616 case RK_SIZED:
4617 return path_var (NULL_TREE, 0);
4618
4619 case RK_CAST:
4620 {
4621 path_var parent_pv
4622 = get_representative_path_var (reg->get_parent_region (), visited);
4623 if (!parent_pv)
4624 return path_var (NULL_TREE, 0);
4625 return path_var (build1 (NOP_EXPR,
4626 reg->get_type (),
4627 parent_pv.m_tree),
4628 parent_pv.m_stack_depth);
4629 }
757bf1df 4630
4631 case RK_HEAP_ALLOCATED:
4632 case RK_ALLOCA:
4633 /* No good way to express heap-allocated/alloca regions as trees. */
4634 return path_var (NULL_TREE, 0);
757bf1df 4635
4636 case RK_STRING:
4637 {
4638 const string_region *string_reg = as_a <const string_region *> (reg);
4639 return path_var (string_reg->get_string_cst (), 0);
4640 }
757bf1df 4641
2402dc6b 4642 case RK_VAR_ARG:
358dab90 4643 case RK_ERRNO:
4644 case RK_UNKNOWN:
4645 return path_var (NULL_TREE, 0);
4646 }
4647}
4648
4649/* Attempt to return a path_var that represents REG, or return
4650 the NULL path_var.
4651 For example, a region for a field of a local would be a path_var
4652 wrapping a COMPONENT_REF.
4653 Use VISITED to prevent infinite mutual recursion with the overload for
4654 svalues.
4655
4656 This function defers to get_representative_path_var_1 to do the work;
4657 it adds verification that get_representative_path_var_1 returned a tree
4658 of the correct type. */
4659
4660path_var
4661region_model::get_representative_path_var (const region *reg,
4662 svalue_set *visited) const
4663{
4664 path_var result = get_representative_path_var_1 (reg, visited);
4665
4666 /* Verify that the result has the same type as REG, if any. */
4667 if (result.m_tree && reg->get_type ())
4668 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
4669
4670 return result;
4671}
4672
4673/* Update this model for any phis in SNODE, assuming we came from
4674 LAST_CFG_SUPEREDGE. */
4675
4676void
4677region_model::update_for_phis (const supernode *snode,
4678 const cfg_superedge *last_cfg_superedge,
4679 region_model_context *ctxt)
4680{
4681 gcc_assert (last_cfg_superedge);
4682
4683 /* Copy this state and pass it to handle_phi so that all of the phi stmts
4684 are effectively handled simultaneously. */
4685 const region_model old_state (*this);
4686
4687 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
4688 !gsi_end_p (gpi); gsi_next (&gpi))
4689 {
4690 gphi *phi = gpi.phi ();
4691
4692 tree src = last_cfg_superedge->get_phi_arg (phi);
4693 tree lhs = gimple_phi_result (phi);
4694
4695 /* Update next_state based on phi and old_state. */
4696 handle_phi (phi, lhs, src, old_state, ctxt);
4697 }
4698}
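
/* The copy matters when phi nodes read each other's results, e.g.
   for a loop that rotates two values (an illustrative sketch):

     # a_3 = PHI <b_1(2)>
     # b_4 = PHI <a_2(2)>

   Both phis must be evaluated against the bindings from before the
   edge was taken; passing OLD_STATE to handle_phi stops the update
   of a_3 from being visible while computing b_4. */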
4699
4700/* Attempt to update this model for taking EDGE (where the last statement
4701 was LAST_STMT), returning true if the edge can be taken, false
4702 otherwise.
4703 When returning false, if OUT is non-NULL, write a new rejected_constraint
4704 to it.
4705
4706 For CFG superedges where LAST_STMT is a conditional or a switch
4707 statement, attempt to add the relevant conditions for EDGE to this
4708 model, returning true if they are feasible, or false if they are
4709 impossible.
4710
4711 For call superedges, push frame information and store arguments
4712 into parameters.
4713
4714 For return superedges, pop frame information and store return
4715 values into any lhs.
4716
4717 Rejection of call/return superedges happens elsewhere, in
4718 program_point::on_edge (i.e. based on program point, rather
4719 than program state). */
4720
4721bool
4722region_model::maybe_update_for_edge (const superedge &edge,
4723 const gimple *last_stmt,
4724 region_model_context *ctxt,
4725 rejected_constraint **out)
757bf1df
DM
4726{
4727 /* Handle frame updates for interprocedural edges. */
4728 switch (edge.m_kind)
4729 {
4730 default:
4731 break;
4732
4733 case SUPEREDGE_CALL:
4734 {
4735 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
4736 update_for_call_superedge (*call_edge, ctxt);
4737 }
4738 break;
4739
4740 case SUPEREDGE_RETURN:
4741 {
4742 const return_superedge *return_edge
4743 = as_a <const return_superedge *> (&edge);
4744 update_for_return_superedge (*return_edge, ctxt);
4745 }
4746 break;
4747
4748 case SUPEREDGE_INTRAPROCEDURAL_CALL:
4749 /* This is a no-op for call summaries; we should already
4750 have handled the effect of the call summary at the call stmt. */
4751 break;
4752 }
4753
4754 if (last_stmt == NULL)
4755 return true;
4756
4757 /* Apply any constraints for conditionals/switch statements. */
4758
4759 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
4760 {
4761 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 4762 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
4763 }
4764
4765 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
4766 {
4767 const switch_cfg_superedge *switch_sedge
4768 = as_a <const switch_cfg_superedge *> (&edge);
4769 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
4770 ctxt, out);
4771 }
4772
4773 /* Apply any constraints due to an exception being thrown. */
4774 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
4775 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 4776 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 4777
4778 return true;
4779}
4780
4781/* Push a new frame_region on to the stack region.
4782 Populate the frame_region with child regions for the function call's
4783 parameters, using values from the arguments at the callsite in the
4784 caller's frame. */
4785
4786void
aef703cf 4787region_model::update_for_gcall (const gcall *call_stmt,
4788 region_model_context *ctxt,
4789 function *callee)
757bf1df 4790{
808f4dfe 4791 /* Build a vec of argument svalues, using the current top
757bf1df 4792 frame for resolving tree expressions. */
808f4dfe 4793 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
4794
4795 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
4796 {
4797 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 4798 arg_svals.quick_push (get_rvalue (arg, ctxt));
4799 }
4800
4801  if (!callee)
4802    {
4803      /* Get the function * from the gcall. */
4804      tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
4805 callee = DECL_STRUCT_FUNCTION (fn_decl);
4806 }
4807
4808 push_frame (callee, &arg_svals, ctxt);
4809}
4810
4811/* Pop the top-most frame_region from the stack, and copy the return
4812 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 4813 the call (if any). */
aef703cf 4814
757bf1df 4815void
4816region_model::update_for_return_gcall (const gcall *call_stmt,
4817 region_model_context *ctxt)
757bf1df 4818{
4819 /* Get the lvalue for the result of the call, passing it to pop_frame,
4820 so that pop_frame can determine the region with respect to the
4821 *caller* frame. */
757bf1df 4822 tree lhs = gimple_call_lhs (call_stmt);
4cebae09 4823 pop_frame (lhs, NULL, ctxt);
4824}
4825
4826/* Extract calling information from the superedge and update the model for the
4827   call.  */
4828
4829void
4830region_model::update_for_call_superedge (const call_superedge &call_edge,
4831 region_model_context *ctxt)
4832{
4833 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 4834 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
4835}
4836
4837/* Extract calling information from the return superedge and update the model
4838   for the returning call.  */
4839
4840void
4841region_model::update_for_return_superedge (const return_superedge &return_edge,
4842 region_model_context *ctxt)
4843{
4844 const gcall *call_stmt = return_edge.get_call_stmt ();
4845 update_for_return_gcall (call_stmt, ctxt);
4846}
4847
4848/* Attempt to use R to replay SUMMARY into this object.
4849 Return true if it is possible. */
757bf1df 4850
4851bool
4852region_model::replay_call_summary (call_summary_replay &r,
4853 const region_model &summary)
757bf1df 4854{
4855 gcc_assert (summary.get_stack_depth () == 1);
4856
4857 m_store.replay_call_summary (r, summary.m_store);
757bf1df 4858
4859 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
4860 return false;
4861
4862 for (auto kv : summary.m_dynamic_extents)
4863 {
4864 const region *summary_reg = kv.first;
4865 const region *caller_reg = r.convert_region_from_summary (summary_reg);
4866 if (!caller_reg)
4867 continue;
4868 const svalue *summary_sval = kv.second;
4869 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
4870 if (!caller_sval)
4871 continue;
4872 m_dynamic_extents.put (caller_reg, caller_sval);
4873 }
4874
4875 return true;
4876}
4877
4878/* Given a true or false edge guarded by conditional statement COND_STMT,
4879 determine appropriate constraints for the edge to be taken.
4880
4881 If they are feasible, add the constraints and return true.
4882
4883 Return false if the constraints contradict existing knowledge
4884 (and so the edge should not be taken).
4885 When returning false, if OUT is non-NULL, write a new rejected_constraint
4886 to it. */
4887
4888bool
4889region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
4890 const gcond *cond_stmt,
4891 region_model_context *ctxt,
4892 rejected_constraint **out)
4893{
4894 ::edge cfg_edge = sedge.get_cfg_edge ();
4895 gcc_assert (cfg_edge != NULL);
4896 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
4897
4898 enum tree_code op = gimple_cond_code (cond_stmt);
4899 tree lhs = gimple_cond_lhs (cond_stmt);
4900 tree rhs = gimple_cond_rhs (cond_stmt);
4901 if (cfg_edge->flags & EDGE_FALSE_VALUE)
4902 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 4903 return add_constraint (lhs, op, rhs, ctxt, out);
4904}
4905
4906/* Return true iff SWITCH_STMT has a non-default label that contains
4907 INT_CST. */
4908
4909static bool
4910has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
4911{
4912 /* We expect the initial label to be the default; skip it. */
4913 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
4914 unsigned min_idx = 1;
4915 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
4916
4917 /* Binary search: try to find the label containing INT_CST.
4918 This requires the cases to be sorted by CASE_LOW (done by the
4919 gimplifier). */
4920 while (max_idx >= min_idx)
4921 {
4922 unsigned case_idx = (min_idx + max_idx) / 2;
4923 tree label = gimple_switch_label (switch_stmt, case_idx);
4924 tree low = CASE_LOW (label);
4925 gcc_assert (low);
4926 tree high = CASE_HIGH (label);
4927 if (!high)
4928 high = low;
4929 if (tree_int_cst_compare (int_cst, low) < 0)
4930 {
4931 /* INT_CST is below the range of this label. */
4932 gcc_assert (case_idx > 0);
4933 max_idx = case_idx - 1;
4934 }
4935 else if (tree_int_cst_compare (int_cst, high) > 0)
4936 {
4937 /* INT_CST is above the range of this case. */
4938 min_idx = case_idx + 1;
4939 }
4940 else
4941 /* This case contains INT_CST. */
4942 return true;
4943 }
4944 /* Not found. */
4945 return false;
4946}
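
/* Example (a sketch): for

     switch (x)
       {
       default: ...      // label 0, skipped
       case 1 ... 3: ... // label 1
       case 7: ...       // label 2
       }

   looking up INT_CST 2 probes label 1 (LOW 1, HIGH 3) and returns
   true, whereas looking up 5 narrows MIN_IDX past MAX_IDX and
   returns false. */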
4947
4948/* Return true iff SWITCH_STMT (which must be on an enum value)
4949 has nondefault cases handling all values in the enum. */
4950
4951static bool
4952has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt)
4953{
4954 gcc_assert (switch_stmt);
4955 tree type = TREE_TYPE (gimple_switch_index (switch_stmt));
4956 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
4957
4958 for (tree enum_val_iter = TYPE_VALUES (type);
4959 enum_val_iter;
4960 enum_val_iter = TREE_CHAIN (enum_val_iter))
4961 {
4962 tree enum_val = TREE_VALUE (enum_val_iter);
4963 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
4964 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
4965 if (!has_nondefault_case_for_value_p (switch_stmt,
4966 DECL_INITIAL (enum_val)))
4967 return false;
4968 }
4969 return true;
4970}
4971
4972/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
4973 for the edge to be taken.
4974
4975 If they are feasible, add the constraints and return true.
4976
4977 Return false if the constraints contradict existing knowledge
4978 (and so the edge should not be taken).
4979 When returning false, if OUT is non-NULL, write a new rejected_constraint
4980 to it. */
4981
4982bool
4983region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
4984 const gswitch *switch_stmt,
4985 region_model_context *ctxt,
4986 rejected_constraint **out)
757bf1df 4987{
4988 tree index = gimple_switch_index (switch_stmt);
4989 const svalue *index_sval = get_rvalue (index, ctxt);
4990
4991 /* If we're switching based on an enum type, assume that the user is only
4992 working with values from the enum. Hence if this is an
4993 implicitly-created "default", assume it doesn't get followed.
4994 This fixes numerous "uninitialized" false positives where we otherwise
4995 consider jumping past the initialization cases. */
4996
4997 if (/* Don't check during feasibility-checking (when ctxt is NULL). */
4998 ctxt
4999 /* Must be an enum value. */
5000 && index_sval->get_type ()
5001 && TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE
5002 && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
5003 /* If we have a constant, then we can check it directly. */
5004 && index_sval->get_kind () != SK_CONSTANT
5005 && edge.implicitly_created_default_p ()
5006 && has_nondefault_cases_for_all_enum_values_p (switch_stmt)
5007 /* Don't do this if there's a chance that the index is
5008 attacker-controlled. */
5009 && !ctxt->possibly_tainted_p (index_sval))
5010 {
5011 if (out)
5012 *out = new rejected_default_case (*this);
5013 return false;
5014 }
5015
5016 bounded_ranges_manager *ranges_mgr = get_range_manager ();
5017 const bounded_ranges *all_cases_ranges
5018 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
5019 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
5020 if (!sat && out)
5021 *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
5022 if (sat && ctxt && !all_cases_ranges->empty_p ())
5023 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
8ca7fa84 5024 return sat;
5025}
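
/* A sketch of the implicit-"default" heuristic above (illustrative only):

     enum e { A, B };
     int f (enum e x)
     {
       int result;
       switch (x)
         {
         case A: result = 1; break;
         case B: result = 2; break;
         }
       return result;
     }

   Without the heuristic, the analyzer would follow the implicitly-created
   "default" edge that skips both assignments and falsely report "result"
   as uninitialized.  */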
5026
5027/* Apply any constraints due to an exception being thrown at LAST_STMT.
5028
5029 If they are feasible, add the constraints and return true.
5030
5031 Return false if the constraints contradict existing knowledge
5032 (and so the edge should not be taken).
5033 When returning false, if OUT is non-NULL, write a new rejected_constraint
5034 to it. */
5035
5036bool
5037region_model::apply_constraints_for_exception (const gimple *last_stmt,
5038 region_model_context *ctxt,
5039 rejected_constraint **out)
5040{
5041 gcc_assert (last_stmt);
5042 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
5043 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
5044 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
5045 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
5046 {
5047 /* We have an exception thrown from operator new.
5048 Add a constraint that the result was NULL, to avoid a false
5049 leak report due to the result being lost when following
5050 the EH edge. */
5051 if (tree lhs = gimple_call_lhs (call))
84fb3546 5052 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
5053 return true;
5054 }
5055 return true;
5056}
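
/* For example (illustrative), given
     p = new widget;
   where "operator new" throws, constraining the lost result to be NULL
   on the exception edge avoids a false report that the allocation
   leaked.  */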
5057
5058/* For use with push_frame when handling a top-level call within the analysis.
5059 PARAM has a defined but unknown initial value.
5060 Anything it points to has escaped, since the calling context "knows"
5061 the pointer, and thus calls to unknown functions could read/write into
5062 the region.
5063 If NONNULL is true, then assume that PARAM must be non-NULL. */
5064
5065void
808f4dfe 5066region_model::on_top_level_param (tree param,
5067 bool nonnull,
5068 region_model_context *ctxt)
757bf1df 5069{
808f4dfe 5070 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 5071 {
5072 const region *param_reg = get_lvalue (param, ctxt);
5073 const svalue *init_ptr_sval
5074 = m_mgr->get_or_create_initial_value (param_reg);
5075 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
5076 m_store.mark_as_escaped (pointee_reg);
5077 if (nonnull)
5078 {
5079 const svalue *null_ptr_sval
5080 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
5081 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
5082 }
5eae0ac7 5083 }
5084}
5085
5086/* Update this region_model to reflect pushing a frame onto the stack
5087 for a call to FUN.
757bf1df 5088
5089 If ARG_SVALS is non-NULL, use it to populate the parameters
5090 in the new frame.
5091 Otherwise, the params have their initial_svalues.
757bf1df 5092
808f4dfe 5093 Return the frame_region for the new frame. */
757bf1df 5094
5095const region *
5096region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
5097 region_model_context *ctxt)
757bf1df 5098{
5099 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
5100 if (arg_svals)
757bf1df 5101 {
5102 /* Arguments supplied from a caller frame. */
5103 tree fndecl = fun->decl;
5104 unsigned idx = 0;
5105 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5106 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 5107 {
5108 /* If there's a mismatching declaration, the call stmt might
5109 not have enough args. Handle this case by leaving the
5110 rest of the params as uninitialized. */
5111 if (idx >= arg_svals->length ())
5112 break;
5113 tree parm_lval = iter_parm;
5114 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
5115 parm_lval = parm_default_ssa;
5116 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 5117 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 5118 set_value (parm_reg, arg_sval, ctxt);
757bf1df 5119 }
5120
5121 /* Handle any variadic args. */
5122 unsigned va_arg_idx = 0;
5123 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
5124 {
5125 const svalue *arg_sval = (*arg_svals)[idx];
5126 const region *var_arg_reg
5127 = m_mgr->get_var_arg_region (m_current_frame,
5128 va_arg_idx);
5129 set_value (var_arg_reg, arg_sval, ctxt);
5130 }
757bf1df 5131 }
808f4dfe 5132 else
757bf1df 5133 {
5134 /* Otherwise we have a top-level call within the analysis. The params
5135 have defined but unknown initial values.
5136 Anything they point to has escaped. */
5137 tree fndecl = fun->decl;
5138
5139 /* Handle "__attribute__((nonnull))". */
5140 tree fntype = TREE_TYPE (fndecl);
5141 bitmap nonnull_args = get_nonnull_args (fntype);
5142
5143 unsigned parm_idx = 0;
5144 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5145 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 5146 {
5147 bool non_null = (nonnull_args
5148 ? (bitmap_empty_p (nonnull_args)
5149 || bitmap_bit_p (nonnull_args, parm_idx))
5150 : false);
294b6da2 5151 if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
dcfc7ac9 5152 on_top_level_param (parm_default_ssa, non_null, ctxt);
294b6da2 5153 else
5154 on_top_level_param (iter_parm, non_null, ctxt);
5155 parm_idx++;
757bf1df 5156 }
5157
5158 BITMAP_FREE (nonnull_args);
757bf1df 5159 }
757bf1df 5160
808f4dfe 5161 return m_current_frame;
5162}
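
/* Illustrative top-level case (hypothetical signature): analyzing
     __attribute__((nonnull)) void test (int *p);
   with a null ARG_SVALS gives "p" a defined-but-unknown initial value
   constrained to be non-NULL, and marks whatever it points to as having
   escaped.  */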
5163
5164/* Get the function of the top-most frame in this region_model's stack.
5165 There must be such a frame. */
757bf1df 5166
5167function *
5168region_model::get_current_function () const
757bf1df 5169{
5170 const frame_region *frame = get_current_frame ();
5171 gcc_assert (frame);
5172 return frame->get_function ();
5173}
5174
808f4dfe 5175/* Pop the topmost frame_region from this region_model's stack.
757bf1df 5176
5177 If RESULT_LVALUE is non-null, copy any return value from the frame
5178 into the corresponding region (evaluated with respect to the *caller*
5179 frame, rather than the called frame).
5180 If OUT_RESULT is non-null, copy any return value from the frame
5181 into *OUT_RESULT.
757bf1df 5182
5183 If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
5184 This is for use when unwinding frames e.g. due to longjmp, to suppress
5185 erroneously reporting uninitialized return values.
5186
5187 Purge the frame region and all its descendent regions.
5188 Convert any pointers that point into such regions into
5189 POISON_KIND_POPPED_STACK svalues. */
757bf1df 5190
808f4dfe 5191void
4cebae09 5192region_model::pop_frame (tree result_lvalue,
808f4dfe 5193 const svalue **out_result,
5194 region_model_context *ctxt,
5195 bool eval_return_svalue)
5196{
5197 gcc_assert (m_current_frame);
757bf1df 5198
808f4dfe 5199 const frame_region *frame_reg = m_current_frame;
5200
5201 /* Notify state machines. */
5202 if (ctxt)
5203 ctxt->on_pop_frame (frame_reg);
5204
5205 /* Evaluate the result, within the callee frame. */
5206 tree fndecl = m_current_frame->get_function ()->decl;
5207 tree result = DECL_RESULT (fndecl);
4cebae09 5208 const svalue *retval = NULL;
5209 if (result
5210 && TREE_TYPE (result) != void_type_node
5211 && eval_return_svalue)
808f4dfe 5212 {
4cebae09 5213 retval = get_rvalue (result, ctxt);
808f4dfe 5214 if (out_result)
13ad6d9f 5215 *out_result = retval;
808f4dfe 5216 }
757bf1df 5217
5218 /* Pop the frame. */
5219 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 5220
5221 if (result_lvalue && retval)
5222 {
5223 gcc_assert (eval_return_svalue);
5224
5225 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
5226 the frame, but before poisoning pointers into the old frame. */
5227 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
5228 set_value (result_dst_reg, retval, ctxt);
5229 }
5230
808f4dfe 5231 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
5232}
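
/* Illustrative use (not from the source): when returning from FN in
     lhs = fn (args);
   "lhs" is passed as RESULT_LVALUE, so FN's return value is evaluated
   within the callee frame, the frame is popped, and the value is then
   written to "lhs" within the caller frame.  */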
5233
808f4dfe 5234/* Get the number of frames in this region_model's stack. */
757bf1df 5235
5236int
5237region_model::get_stack_depth () const
757bf1df 5238{
5239 const frame_region *frame = get_current_frame ();
5240 if (frame)
5241 return frame->get_stack_depth ();
5242 else
5243 return 0;
5244}
5245
5246/* Get the frame_region with the given index within the stack.
5247 The frame_region must exist. */
757bf1df 5248
5249const frame_region *
5250region_model::get_frame_at_index (int index) const
757bf1df 5251{
5252 const frame_region *frame = get_current_frame ();
5253 gcc_assert (frame);
5254 gcc_assert (index >= 0);
5255 gcc_assert (index <= frame->get_index ());
5256 while (index != frame->get_index ())
5257 {
5258 frame = frame->get_calling_frame ();
5259 gcc_assert (frame);
5260 }
5261 return frame;
5262}
5263
5264/* Unbind svalues for any regions in REG and below.
5265 Find any pointers to such regions; convert them to
5266 poisoned values of kind PKIND.
5267 Also purge any dynamic extents. */
757bf1df 5268
5269void
5270region_model::unbind_region_and_descendents (const region *reg,
5271 enum poison_kind pkind)
757bf1df 5272{
5273 /* Gather a set of base regions to be unbound. */
5274 hash_set<const region *> base_regs;
5275 for (store::cluster_map_t::iterator iter = m_store.begin ();
5276 iter != m_store.end (); ++iter)
757bf1df 5277 {
5278 const region *iter_base_reg = (*iter).first;
5279 if (iter_base_reg->descendent_of_p (reg))
5280 base_regs.add (iter_base_reg);
757bf1df 5281 }
5282 for (hash_set<const region *>::iterator iter = base_regs.begin ();
5283 iter != base_regs.end (); ++iter)
5284 m_store.purge_cluster (*iter);
757bf1df 5285
5286 /* Find any pointers to REG or its descendents; convert to poisoned. */
5287 poison_any_pointers_to_descendents (reg, pkind);
5288
5289 /* Purge dynamic extents of any base regions in REG and below
5290 (e.g. VLAs and alloca stack regions). */
5291 for (auto iter : m_dynamic_extents)
5292 {
5293 const region *iter_reg = iter.first;
5294 if (iter_reg->descendent_of_p (reg))
5295 unset_dynamic_extents (iter_reg);
5296 }
5297}
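
/* E.g. (illustrative) when the frame of
     int *f (void) { int x = 42; return &x; }
   is popped, the binding for "x" is purged, and the returned pointer
   becomes a poisoned value of kind POISON_KIND_POPPED_STACK.  */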
5298
5299/* Implementation of BindingVisitor.
5300 Update the bound svalues for regions below REG to use poisoned
5301 values instead. */
757bf1df 5302
808f4dfe 5303struct bad_pointer_finder
757bf1df 5304{
5305 bad_pointer_finder (const region *reg, enum poison_kind pkind,
5306 region_model_manager *mgr)
5307 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
5308 {}
757bf1df 5309
5310 void on_binding (const binding_key *, const svalue *&sval)
5311 {
5312 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5313 {
5314 const region *ptr_dst = ptr_sval->get_pointee ();
5315 /* Poison ptrs to descendents of REG, but not to REG itself,
5316 otherwise double-free detection doesn't work (since sm-state
5317 for "free" is stored on the original ptr svalue). */
5318 if (ptr_dst->descendent_of_p (m_reg)
5319 && ptr_dst != m_reg)
5320 {
5321 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
5322 sval->get_type ());
5323 ++m_count;
5324 }
5325 }
5326 }
757bf1df 5327
5328 const region *m_reg;
5329 enum poison_kind m_pkind;
5330 region_model_manager *const m_mgr;
5331 int m_count;
5332};
757bf1df 5333
5334/* Find any pointers to REG or its descendents; convert them to
5335 poisoned values of kind PKIND.
5336 Return the number of pointers that were poisoned. */
757bf1df 5337
5338int
5339region_model::poison_any_pointers_to_descendents (const region *reg,
5340 enum poison_kind pkind)
5341{
5342 bad_pointer_finder bv (reg, pkind, m_mgr);
5343 m_store.for_each_binding (bv);
5344 return bv.m_count;
5345}
5346
5347/* Attempt to merge THIS with OTHER_MODEL, writing the result
5348 to OUT_MODEL. Use POINT to distinguish values created as a
5349 result of merging. */
757bf1df 5350
5351bool
5352region_model::can_merge_with_p (const region_model &other_model,
5353 const program_point &point,
5354 region_model *out_model,
5355 const extrinsic_state *ext_state,
5356 const program_state *state_a,
5357 const program_state *state_b) const
757bf1df 5358{
5359 gcc_assert (out_model);
5360 gcc_assert (m_mgr == other_model.m_mgr);
5361 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 5362
5363 if (m_current_frame != other_model.m_current_frame)
5364 return false;
5365 out_model->m_current_frame = m_current_frame;
757bf1df 5366
5367 model_merger m (this, &other_model, point, out_model,
5368 ext_state, state_a, state_b);
757bf1df 5369
5370 if (!store::can_merge_p (&m_store, &other_model.m_store,
5371 &out_model->m_store, m_mgr->get_store_manager (),
5372 &m))
5373 return false;
5374
5375 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
5376 &out_model->m_dynamic_extents))
5377 return false;
5378
5379 /* Merge constraints. */
5380 constraint_manager::merge (*m_constraints,
5381 *other_model.m_constraints,
c710051a 5382 out_model->m_constraints);
757bf1df 5383
808f4dfe 5384 return true;
5385}
5386
5387/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
5388 otherwise. */
5389
5390tree
5391region_model::get_fndecl_for_call (const gcall *call,
5392 region_model_context *ctxt)
5393{
5394 tree fn_ptr = gimple_call_fn (call);
5395 if (fn_ptr == NULL_TREE)
5396 return NULL_TREE;
5397 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
5398 if (const region_svalue *fn_ptr_ptr
5399 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 5400 {
5401 const region *reg = fn_ptr_ptr->get_pointee ();
5402 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 5403 {
808f4dfe 5404 tree fn_decl = fn_reg->get_fndecl ();
5405 cgraph_node *node = cgraph_node::get (fn_decl);
5406 if (!node)
5407 return NULL_TREE;
5408 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
5409 if (ultimate_node)
5410 return ultimate_node->decl;
5411 }
5412 }
5413
5414 return NULL_TREE;
5415}
5416
808f4dfe 5417/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 5418
faacafd2 5419struct append_regions_cb_data
757bf1df 5420{
5421 const region_model *model;
5422 auto_vec<const decl_region *> *out;
5423};
757bf1df 5424
faacafd2 5425/* Populate *OUT with all decl_regions in the current
808f4dfe 5426 frame that have clusters within the store. */
5427
5428void
808f4dfe 5429region_model::
faacafd2 5430get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
757bf1df 5431{
faacafd2 5432 append_regions_cb_data data;
5433 data.model = this;
5434 data.out = out;
faacafd2 5435 m_store.for_each_cluster (append_regions_cb, &data);
5436}
5437
faacafd2 5438/* Implementation detail of get_regions_for_current_frame. */
757bf1df 5439
808f4dfe 5440void
5441region_model::append_regions_cb (const region *base_reg,
5442 append_regions_cb_data *cb_data)
757bf1df 5443{
5444 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
5445 return;
5446 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
faacafd2 5447 cb_data->out->safe_push (decl_reg);
5448}
5449
5450
5451/* Abstract class for diagnostics related to the use of
5452 floating-point arithmetic where precision is needed. */
5453
5454class imprecise_floating_point_arithmetic : public pending_diagnostic
5455{
5456public:
5457 int get_controlling_option () const final override
5458 {
5459 return OPT_Wanalyzer_imprecise_fp_arithmetic;
5460 }
5461};
5462
5463/* Concrete diagnostic to complain about uses of floating-point arithmetic
5464 in the size argument of malloc etc. */
5465
5466class float_as_size_arg : public imprecise_floating_point_arithmetic
5467{
5468public:
5469 float_as_size_arg (tree arg) : m_arg (arg)
5470 {}
5471
5472 const char *get_kind () const final override
5473 {
5474 return "float_as_size_arg_diagnostic";
5475 }
5476
ac9230fb 5477 bool subclass_equal_p (const pending_diagnostic &other) const final override
5478 {
5479 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
5480 }
5481
0e466e97 5482 bool emit (rich_location *rich_loc, logger *) final override
5483 {
5484 diagnostic_metadata m;
5485 bool warned = warning_meta (rich_loc, m, get_controlling_option (),
5486 "use of floating-point arithmetic here might"
5487 " yield unexpected results");
5488 if (warned)
5489 inform (rich_loc->get_loc (), "only use operands of an integer type"
5490 " inside the size argument");
5491 return warned;
5492 }
5493
5494 label_text describe_final_event (const evdesc::final_event &ev) final
5495 override
5496 {
5497 if (m_arg)
5498 return ev.formatted_print ("operand %qE is of type %qT",
5499 m_arg, TREE_TYPE (m_arg));
5500 return ev.formatted_print ("at least one operand of the size argument is"
5501 " of a floating-point type");
5502 }
5503
5504private:
5505 tree m_arg;
5506};
5507
5508/* Visitor to find uses of floating-point variables/constants in an svalue. */
5509
5510class contains_floating_point_visitor : public visitor
5511{
5512public:
5513 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
5514 {
5515 root_sval->accept (this);
5516 }
5517
5518 const svalue *get_svalue_to_report ()
5519 {
5520 return m_result;
5521 }
5522
5523 void visit_constant_svalue (const constant_svalue *sval) final override
5524 {
5525 /* By the time the analyzer runs, constant integer operands in a
5526 floating-point expression have already been implicitly converted to
5527 floating-point. Thus we prefer to report non-constants, so that the
5528 diagnostic always reports a floating-point operand. */
5529 tree type = sval->get_type ();
5530 if (type && FLOAT_TYPE_P (type) && !m_result)
5531 m_result = sval;
5532 }
5533
5534 void visit_conjured_svalue (const conjured_svalue *sval) final override
5535 {
5536 tree type = sval->get_type ();
5537 if (type && FLOAT_TYPE_P (type))
5538 m_result = sval;
5539 }
5540
5541 void visit_initial_svalue (const initial_svalue *sval) final override
5542 {
5543 tree type = sval->get_type ();
5544 if (type && FLOAT_TYPE_P (type))
5545 m_result = sval;
5546 }
5547
5548private:
5549 /* Non-null if at least one floating-point operand was found. */
5550 const svalue *m_result;
5551};
5552
5553/* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
5554
5555void
5556region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
5557 region_model_context *ctxt) const
5558{
5559 gcc_assert (ctxt);
5560
5561 contains_floating_point_visitor v (size_in_bytes);
5562 if (const svalue *float_sval = v.get_svalue_to_report ())
5563 {
5564 tree diag_arg = get_representative_tree (float_sval);
6341f14e 5565 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
5566 }
5567}
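
/* For instance (illustrative), this complains about
     void *p = malloc (n * 4.0);
   since the size argument uses floating-point arithmetic, and suggests
   using only integer operands instead.  */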
5568
5569/* Return a region describing a heap-allocated block of memory.
5570 Use CTXT to complain about tainted sizes.
5571
5572 Reuse an existing heap_allocated_region if it's not being referenced by
5573 this region_model; otherwise create a new one.
5574
5575 If UPDATE_STATE_MACHINE is true, also transition the pointer to the
5576 heap_allocated_region from the "start" state to "assumed non-null". */
757bf1df 5577
808f4dfe 5578const region *
ce917b04 5579region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
5580 region_model_context *ctxt,
5581 bool update_state_machine,
5582 const call_details *cd)
5583{
5584 /* Determine which regions are referenced in this region_model, so that
5585 we can reuse an existing heap_allocated_region if it's not in use on
5586 this path. */
7dc0ecaf 5587 auto_bitmap base_regs_in_use;
ce917b04 5588 get_referenced_base_regions (base_regs_in_use);
5589
5590 /* Don't reuse regions that are marked as TOUCHED. */
5591 for (store::cluster_map_t::iterator iter = m_store.begin ();
5592 iter != m_store.end (); ++iter)
5593 if ((*iter).second->touched_p ())
5594 {
5595 const region *base_reg = (*iter).first;
5596 bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
5597 }
5598
5599 const region *reg
5600 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
5601 if (size_in_bytes)
5602 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
5603 set_dynamic_extents (reg, size_in_bytes, ctxt);
5604
5605 if (update_state_machine && cd)
5606 {
5607 const svalue *ptr_sval
5608 = m_mgr->get_ptr_svalue (cd->get_lhs_type (), reg);
5609 transition_ptr_sval_non_null (ctxt, ptr_sval);
5610 }
5611
808f4dfe 5612 return reg;
5613}
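
/* Sketch of a call site (hypothetical handler for a malloc-like call):
     const region *new_reg
       = model->get_or_create_region_for_heap_alloc (size_sval, ctxt,
                                                     true, &cd);
   where passing true and a call_details also transitions the resulting
   pointer's state as described above.  */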
5614
5615/* Populate OUT_IDS with the set of IDs of those base regions which are
5616 reachable in this region_model. */
5617
5618void
7dc0ecaf 5619region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
5620{
5621 reachable_regions reachable_regs (const_cast<region_model *> (this));
5622 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
5623 &reachable_regs);
5624 /* Get regions for locals that have explicitly bound values. */
5625 for (store::cluster_map_t::iterator iter = m_store.begin ();
5626 iter != m_store.end (); ++iter)
5627 {
5628 const region *base_reg = (*iter).first;
5629 if (const region *parent = base_reg->get_parent_region ())
5630 if (parent->get_kind () == RK_FRAME)
5631 reachable_regs.add (base_reg, false);
5632 }
5633
5634 bitmap_clear (out_ids);
5635 for (auto iter_reg : reachable_regs)
5636 bitmap_set_bit (out_ids, iter_reg->get_id ());
5637}
5638
808f4dfe 5639/* Return a new region describing a block of memory allocated within the
5640 current frame.
5641 Use CTXT to complain about tainted sizes. */
757bf1df 5642
808f4dfe 5643const region *
b9365b93
DM
5644region_model::create_region_for_alloca (const svalue *size_in_bytes,
5645 region_model_context *ctxt)
757bf1df 5646{
808f4dfe 5647 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
ea4e3218 5648 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 5649 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 5650 return reg;
5651}
5652
5653/* Record that the size of REG is SIZE_IN_BYTES.
5654 Use CTXT to complain about tainted sizes. */
5655
5656void
9a2c9579 5657region_model::set_dynamic_extents (const region *reg,
5658 const svalue *size_in_bytes,
5659 region_model_context *ctxt)
5660{
5661 assert_compat_types (size_in_bytes->get_type (), size_type_node);
b9365b93 5662 if (ctxt)
5663 {
5664 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
5665 ctxt);
5666 check_dynamic_size_for_floats (size_in_bytes, ctxt);
5667 }
5668 m_dynamic_extents.put (reg, size_in_bytes);
5669}
5670
5671/* Get the recorded dynamic size of REG in bytes, or NULL if no dynamic
5672 size was recorded. */
5673
5674const svalue *
5675region_model::get_dynamic_extents (const region *reg) const
757bf1df 5676{
5677 if (const svalue * const *slot = m_dynamic_extents.get (reg))
5678 return *slot;
5679 return NULL;
5680}
5681
5682/* Unset any recorded dynamic size of REG. */
5683
5684void
5685region_model::unset_dynamic_extents (const region *reg)
5686{
5687 m_dynamic_extents.remove (reg);
5688}
5689
5690/* Information about the layout of a RECORD_TYPE, captured as a vector
5691 of items, where each item is either a field or padding. */
5692
5693class record_layout
5694{
5695public:
5696 /* An item within a record; either a field, or padding after a field. */
5697 struct item
5698 {
5699 public:
5700 item (const bit_range &br,
5701 tree field,
5702 bool is_padding)
5703 : m_bit_range (br),
5704 m_field (field),
5705 m_is_padding (is_padding)
5706 {
5707 }
5708
5709 bit_offset_t get_start_bit_offset () const
5710 {
5711 return m_bit_range.get_start_bit_offset ();
5712 }
5713 bit_offset_t get_next_bit_offset () const
5714 {
5715 return m_bit_range.get_next_bit_offset ();
5716 }
5717
5718 bool contains_p (bit_offset_t offset) const
5719 {
5720 return m_bit_range.contains_p (offset);
5721 }
5722
5723 void dump_to_pp (pretty_printer *pp) const
5724 {
5725 if (m_is_padding)
5726 pp_printf (pp, "padding after %qD", m_field);
5727 else
5728 pp_printf (pp, "%qD", m_field);
5729 pp_string (pp, ", ");
5730 m_bit_range.dump_to_pp (pp);
5731 }
5732
5733 bit_range m_bit_range;
5734 tree m_field;
5735 bool m_is_padding;
5736 };
5737
5738 record_layout (tree record_type)
5739 {
5740 gcc_assert (TREE_CODE (record_type) == RECORD_TYPE);
5741
5742 for (tree iter = TYPE_FIELDS (record_type); iter != NULL_TREE;
5743 iter = DECL_CHAIN (iter))
5744 {
5745 if (TREE_CODE (iter) == FIELD_DECL)
5746 {
5747 int iter_field_offset = int_bit_position (iter);
5748 bit_size_t size_in_bits;
5749 if (!int_size_in_bits (TREE_TYPE (iter), &size_in_bits))
5750 size_in_bits = 0;
5751
5752 maybe_pad_to (iter_field_offset);
5753
5754 /* Add field. */
5755 m_items.safe_push (item (bit_range (iter_field_offset,
5756 size_in_bits),
5757 iter, false));
5758 }
5759 }
5760
5761 /* Add any trailing padding. */
5762 bit_size_t size_in_bits;
5763 if (int_size_in_bits (record_type, &size_in_bits))
5764 maybe_pad_to (size_in_bits);
5765 }
5766
5767 void dump_to_pp (pretty_printer *pp) const
5768 {
5769 unsigned i;
5770 item *it;
5771 FOR_EACH_VEC_ELT (m_items, i, it)
5772 {
5773 it->dump_to_pp (pp);
5774 pp_newline (pp);
5775 }
5776 }
5777
5778 DEBUG_FUNCTION void dump () const
5779 {
5780 pretty_printer pp;
5781 pp_format_decoder (&pp) = default_tree_printer;
5782 pp.buffer->stream = stderr;
5783 dump_to_pp (&pp);
5784 pp_flush (&pp);
5785 }
5786
5787 const record_layout::item *get_item_at (bit_offset_t offset) const
5788 {
5789 unsigned i;
5790 item *it;
5791 FOR_EACH_VEC_ELT (m_items, i, it)
5792 if (it->contains_p (offset))
5793 return it;
5794 return NULL;
5795 }
5796
5797private:
5798 /* Subroutine of ctor. Add padding item to NEXT_OFFSET if necessary. */
5799
5800 void maybe_pad_to (bit_offset_t next_offset)
5801 {
5802 if (m_items.length () > 0)
5803 {
5804 const item &last_item = m_items[m_items.length () - 1];
5805 bit_offset_t offset_after_last_item
5806 = last_item.get_next_bit_offset ();
5807 if (next_offset > offset_after_last_item)
5808 {
5809 bit_size_t padding_size
5810 = next_offset - offset_after_last_item;
5811 m_items.safe_push (item (bit_range (offset_after_last_item,
5812 padding_size),
5813 last_item.m_field, true));
5814 }
5815 }
5816 }
5817
5818 auto_vec<item> m_items;
5819};
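
/* For example (illustrative, assuming 8-bit chars and 32-bit ints with
   32-bit alignment), for
     struct s { char c; int i; };
   the captured layout is:
     c, bits 0-7
     padding after c, bits 8-31
     i, bits 32-63  */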
5820
5821/* A subclass of pending_diagnostic for complaining about uninitialized data
5822 being copied across a trust boundary to an untrusted output
5823 (e.g. copy_to_user infoleaks in the Linux kernel). */
5824
5825class exposure_through_uninit_copy
5826 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
5827{
5828public:
5829 exposure_through_uninit_copy (const region *src_region,
5830 const region *dest_region,
ffaeb9dc 5831 const svalue *copied_sval)
5832 : m_src_region (src_region),
5833 m_dest_region (dest_region),
ffaeb9dc 5834 m_copied_sval (copied_sval)
5835 {
5836 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
5837 || m_copied_sval->get_kind () == SK_COMPOUND);
5838 }
5839
5840 const char *get_kind () const final override
5841 {
5842 return "exposure_through_uninit_copy";
5843 }
5844
5845 bool operator== (const exposure_through_uninit_copy &other) const
5846 {
5847 return (m_src_region == other.m_src_region
5848 && m_dest_region == other.m_dest_region
5849 && m_copied_sval == other.m_copied_sval);
5850 }
5851
5852 int get_controlling_option () const final override
5853 {
5854 return OPT_Wanalyzer_exposure_through_uninit_copy;
5855 }
5856
0e466e97 5857 bool emit (rich_location *rich_loc, logger *) final override
5858 {
5859 diagnostic_metadata m;
5860 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
5861 m.add_cwe (200);
5862 enum memory_space mem_space = get_src_memory_space ();
5863 bool warned;
5864 switch (mem_space)
5865 {
5866 default:
5867 warned = warning_meta
5868 (rich_loc, m, get_controlling_option (),
5869 "potential exposure of sensitive information"
5870 " by copying uninitialized data across trust boundary");
5871 break;
5872 case MEMSPACE_STACK:
5873 warned = warning_meta
5874 (rich_loc, m, get_controlling_option (),
5875 "potential exposure of sensitive information"
5876 " by copying uninitialized data from stack across trust boundary");
5877 break;
5878 case MEMSPACE_HEAP:
5879 warned = warning_meta
5880 (rich_loc, m, get_controlling_option (),
5881 "potential exposure of sensitive information"
5882 " by copying uninitialized data from heap across trust boundary");
5883 break;
5884 }
5885 if (warned)
5886 {
5887 location_t loc = rich_loc->get_loc ();
5888 inform_number_of_uninit_bits (loc);
5889 complain_about_uninit_ranges (loc);
5890
5891 if (mem_space == MEMSPACE_STACK)
5892 maybe_emit_fixit_hint ();
5893 }
5894 return warned;
5895 }
5896
5897 label_text describe_final_event (const evdesc::final_event &) final override
5898 {
5899 enum memory_space mem_space = get_src_memory_space ();
5900 switch (mem_space)
5901 {
5902 default:
5903 return label_text::borrow ("uninitialized data copied here");
5904
5905 case MEMSPACE_STACK:
5906 return label_text::borrow ("uninitialized data copied from stack here");
5907
5908 case MEMSPACE_HEAP:
5909 return label_text::borrow ("uninitialized data copied from heap here");
5910 }
5911 }
5912
5913 void mark_interesting_stuff (interesting_t *interest) final override
5914 {
5915 if (m_src_region)
5916 interest->add_region_creation (m_src_region);
5917 }
5918
5919private:
5920 enum memory_space get_src_memory_space () const
5921 {
5922 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
5923 }
5924
5925 bit_size_t calc_num_uninit_bits () const
5926 {
5927 switch (m_copied_sval->get_kind ())
5928 {
5929 default:
5930 gcc_unreachable ();
5931 break;
5932 case SK_POISONED:
5933 {
5934 const poisoned_svalue *poisoned_sval
5935 = as_a <const poisoned_svalue *> (m_copied_sval);
5936 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
5937
5938 /* Give up if we don't have type information. */
5939 if (m_copied_sval->get_type () == NULL_TREE)
5940 return 0;
5941
5942 bit_size_t size_in_bits;
5943 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
5944 return size_in_bits;
5945
5946 /* Give up if we can't get the size of the type. */
5947 return 0;
5948 }
5949 break;
5950 case SK_COMPOUND:
5951 {
5952 const compound_svalue *compound_sval
5953 = as_a <const compound_svalue *> (m_copied_sval);
5954 bit_size_t result = 0;
5955 /* Find keys for uninit svals. */
5956 for (auto iter : *compound_sval)
5957 {
5958 const svalue *sval = iter.second;
5959 if (const poisoned_svalue *psval
5960 = sval->dyn_cast_poisoned_svalue ())
5961 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
5962 {
5963 const binding_key *key = iter.first;
5964 const concrete_binding *ckey
5965 = key->dyn_cast_concrete_binding ();
5966 gcc_assert (ckey);
5967 result += ckey->get_size_in_bits ();
5968 }
5969 }
5970 return result;
5971 }
5972 }
5973 }
5974
5975 void inform_number_of_uninit_bits (location_t loc) const
5976 {
5977 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
5978 if (num_uninit_bits <= 0)
5979 return;
5980 if (num_uninit_bits % BITS_PER_UNIT == 0)
5981 {
5982 /* Express in bytes. */
5983 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
5984 if (num_uninit_bytes == 1)
5985 inform (loc, "1 byte is uninitialized");
5986 else
5987 inform (loc,
5988 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
5989 }
5990 else
5991 {
5992 /* Express in bits. */
5993 if (num_uninit_bits == 1)
5994 inform (loc, "1 bit is uninitialized");
5995 else
5996 inform (loc,
5997 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
5998 }
5999 }
6000
6001 void complain_about_uninit_ranges (location_t loc) const
6002 {
6003 if (const compound_svalue *compound_sval
6004 = m_copied_sval->dyn_cast_compound_svalue ())
6005 {
6006 /* Find keys for uninit svals. */
6007 auto_vec<const concrete_binding *> uninit_keys;
6008 for (auto iter : *compound_sval)
6009 {
6010 const svalue *sval = iter.second;
6011 if (const poisoned_svalue *psval
6012 = sval->dyn_cast_poisoned_svalue ())
6013 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6014 {
6015 const binding_key *key = iter.first;
6016 const concrete_binding *ckey
6017 = key->dyn_cast_concrete_binding ();
6018 gcc_assert (ckey);
6019 uninit_keys.safe_push (ckey);
6020 }
6021 }
6022 /* Complain about them in sorted order. */
6023 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
6024
6025 std::unique_ptr<record_layout> layout;
6026
6027 tree type = m_copied_sval->get_type ();
6028 if (type && TREE_CODE (type) == RECORD_TYPE)
6029 {
6030 // (std::make_unique is C++14)
6031 layout = std::unique_ptr<record_layout> (new record_layout (type));
6032
6033 if (0)
6034 layout->dump ();
6035 }
6036
6037 unsigned i;
6038 const concrete_binding *ckey;
6039 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
6040 {
6041 bit_offset_t start_bit = ckey->get_start_bit_offset ();
6042 bit_offset_t next_bit = ckey->get_next_bit_offset ();
6043 complain_about_uninit_range (loc, start_bit, next_bit,
6044 layout.get ());
6045 }
6046 }
6047 }
6048
6049 void complain_about_uninit_range (location_t loc,
6050 bit_offset_t start_bit,
6051 bit_offset_t next_bit,
6052 const record_layout *layout) const
6053 {
6054 if (layout)
6055 {
6056 while (start_bit < next_bit)
6057 {
6058 if (const record_layout::item *item
6059 = layout->get_item_at (start_bit))
6060 {
6061 gcc_assert (start_bit >= item->get_start_bit_offset ());
6062 gcc_assert (start_bit < item->get_next_bit_offset ());
6063 if (item->get_start_bit_offset () == start_bit
6064 && item->get_next_bit_offset () <= next_bit)
6065 complain_about_fully_uninit_item (*item);
6066 else
6067 complain_about_partially_uninit_item (*item);
6068 start_bit = item->get_next_bit_offset ();
6069 continue;
6070 }
6071 else
6072 break;
6073 }
6074 }
6075
6076 if (start_bit >= next_bit)
6077 return;
6078
6079 if (start_bit % 8 == 0 && next_bit % 8 == 0)
6080 {
6081 /* Express in bytes. */
6082 byte_offset_t start_byte = start_bit / 8;
6083 byte_offset_t last_byte = (next_bit / 8) - 1;
6084 if (last_byte == start_byte)
6085 inform (loc,
6086 "byte %wu is uninitialized",
6087 start_byte.to_uhwi ());
6088 else
6089 inform (loc,
6090 "bytes %wu - %wu are uninitialized",
6091 start_byte.to_uhwi (),
6092 last_byte.to_uhwi ());
6093 }
6094 else
6095 {
6096 /* Express in bits. */
6097 bit_offset_t last_bit = next_bit - 1;
6098 if (last_bit == start_bit)
6099 inform (loc,
6100 "bit %wu is uninitialized",
6101 start_bit.to_uhwi ());
6102 else
6103 inform (loc,
6104 "bits %wu - %wu are uninitialized",
6105 start_bit.to_uhwi (),
6106 last_bit.to_uhwi ());
6107 }
6108 }
6109
6110 static void
6111 complain_about_fully_uninit_item (const record_layout::item &item)
6112 {
6113 tree field = item.m_field;
6114 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
6115 if (item.m_is_padding)
6116 {
6117 if (num_bits % 8 == 0)
6118 {
6119 /* Express in bytes. */
6120 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6121 if (num_bytes == 1)
6122 inform (DECL_SOURCE_LOCATION (field),
6123 "padding after field %qD is uninitialized (1 byte)",
6124 field);
6125 else
6126 inform (DECL_SOURCE_LOCATION (field),
6127 "padding after field %qD is uninitialized (%wu bytes)",
6128 field, num_bytes.to_uhwi ());
6129 }
6130 else
6131 {
6132 /* Express in bits. */
6133 if (num_bits == 1)
6134 inform (DECL_SOURCE_LOCATION (field),
6135 "padding after field %qD is uninitialized (1 bit)",
6136 field);
6137 else
6138 inform (DECL_SOURCE_LOCATION (field),
6139 "padding after field %qD is uninitialized (%wu bits)",
6140 field, num_bits.to_uhwi ());
6141 }
6142 }
6143 else
6144 {
6145 if (num_bits % 8 == 0)
6146 {
6147 /* Express in bytes. */
6148 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6149 if (num_bytes == 1)
6150 inform (DECL_SOURCE_LOCATION (field),
6151 "field %qD is uninitialized (1 byte)", field);
6152 else
6153 inform (DECL_SOURCE_LOCATION (field),
6154 "field %qD is uninitialized (%wu bytes)",
6155 field, num_bytes.to_uhwi ());
6156 }
6157 else
6158 {
6159 /* Express in bits. */
6160 if (num_bits == 1)
6161 inform (DECL_SOURCE_LOCATION (field),
6162 "field %qD is uninitialized (1 bit)", field);
6163 else
6164 inform (DECL_SOURCE_LOCATION (field),
6165 "field %qD is uninitialized (%wu bits)",
6166 field, num_bits.to_uhwi ());
6167 }
6168 }
6169 }
6170
6171 static void
6172 complain_about_partially_uninit_item (const record_layout::item &item)
6173 {
6174 tree field = item.m_field;
6175 if (item.m_is_padding)
6176 inform (DECL_SOURCE_LOCATION (field),
6177 "padding after field %qD is partially uninitialized",
6178 field);
6179 else
6180 inform (DECL_SOURCE_LOCATION (field),
6181 "field %qD is partially uninitialized",
6182 field);
6183 /* TODO: ideally we'd describe what parts are uninitialized. */
6184 }
6185
6186 void maybe_emit_fixit_hint () const
6187 {
6188 if (tree decl = m_src_region->maybe_get_decl ())
6189 {
6190 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
6191 hint_richloc.add_fixit_insert_after (" = {0}");
6192 inform (&hint_richloc,
6193 "suggest forcing zero-initialization by"
6194 " providing a %<{0}%> initializer");
6195 }
6196 }
6197
6198private:
6199 const region *m_src_region;
6200 const region *m_dest_region;
6201 const svalue *m_copied_sval;
6202};
6203
6204/* Return true if any part of SVAL is uninitialized. */
6205
6206static bool
6207contains_uninit_p (const svalue *sval)
6208{
6209 struct uninit_finder : public visitor
6210 {
6211 public:
6212 uninit_finder () : m_found_uninit (false) {}
6213 void visit_poisoned_svalue (const poisoned_svalue *sval)
6214 {
6215 if (sval->get_poison_kind () == POISON_KIND_UNINIT)
6216 m_found_uninit = true;
6217 }
6218 bool m_found_uninit;
6219 };
6220
6221 uninit_finder v;
6222 sval->accept (&v);
6223
6224 return v.m_found_uninit;
6225}
6226
6227/* Function for use by plugins when simulating writing data through a
6228 pointer to an "untrusted" region DST_REG (and thus crossing a security
6229 boundary), such as copying data to user space in an OS kernel.
6230
6231 Check that COPIED_SVAL is fully initialized. If not, complain about
6232 an infoleak to CTXT.
6233
6234 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
6235 as to where COPIED_SVAL came from. */
6236
6237void
6238region_model::maybe_complain_about_infoleak (const region *dst_reg,
6239 const svalue *copied_sval,
6240 const region *src_reg,
6241 region_model_context *ctxt)
6242{
6243 /* Check for exposure. */
6244 if (contains_uninit_p (copied_sval))
6245 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
6246 dst_reg,
6247 copied_sval));
6248}
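
/* Sketch of plugin usage (hypothetical names): when simulating a
   copy_to_user-style write, a plugin can call
     model->maybe_complain_about_infoleak (dst_reg, copied_sval,
                                           src_reg, ctxt);
   with DST_REG the untrusted destination and COPIED_SVAL the value that
   was read from the trusted side.  */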
6249
6250/* Set errno to a positive symbolic int, as if some error has occurred. */
6251
6252void
6253region_model::set_errno (const call_details &cd)
6254{
6255 const region *errno_reg = m_mgr->get_errno_region ();
6256 conjured_purge p (this, cd.get_ctxt ());
6257 const svalue *new_errno_sval
6258 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
6259 cd.get_call_stmt (),
6260 errno_reg, p);
6261 const svalue *zero
6262 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
6263 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
6264 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
6265}
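
/* Illustrative use (sketch): a known-function handler simulating a
   failing call can invoke
     cd.get_model ()->set_errno (cd);
   leaving errno constrained to a positive but otherwise unknown
   value.  */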
6266
6267/* class noop_region_model_context : public region_model_context. */
6268
c65d3c7f 6269void
6341f14e 6270noop_region_model_context::add_note (std::unique_ptr<pending_note>)
c65d3c7f 6271{
6272}
6273
6274void
6275noop_region_model_context::add_event (std::unique_ptr<checker_event>)
6276{
6277}
6278
eafa9d96 6279void
accece8c 6280noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
eafa9d96 6281{
6282}
6283
6284void
6285noop_region_model_context::terminate_path ()
6286{
6287}
6288
6289/* class region_model_context_decorator : public region_model_context. */
6290
6291void
6292region_model_context_decorator::add_event (std::unique_ptr<checker_event> event)
6293{
6294 if (m_inner)
6295 m_inner->add_event (std::move (event));
6296}
6297
808f4dfe 6298/* struct model_merger. */
757bf1df 6299
808f4dfe 6300/* Dump a multiline representation of this merger to PP. */
6301
6302void
808f4dfe 6303model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 6304{
6305 pp_string (pp, "model A:");
6306 pp_newline (pp);
6307 m_model_a->dump_to_pp (pp, simple, true);
6308 pp_newline (pp);
757bf1df 6309
808f4dfe 6310 pp_string (pp, "model B:");
757bf1df 6311 pp_newline (pp);
808f4dfe 6312 m_model_b->dump_to_pp (pp, simple, true);
6313 pp_newline (pp);
6314
808f4dfe 6315 pp_string (pp, "merged model:");
757bf1df 6316 pp_newline (pp);
808f4dfe 6317 m_merged_model->dump_to_pp (pp, simple, true);
6318 pp_newline (pp);
6319}
6320
808f4dfe 6321/* Dump a multiline representation of this merger to FILE. */
6322
6323void
808f4dfe 6324model_merger::dump (FILE *fp, bool simple) const
6325{
6326 pretty_printer pp;
6327 pp_format_decoder (&pp) = default_tree_printer;
6328 pp_show_color (&pp) = pp_show_color (global_dc->printer);
6329 pp.buffer->stream = fp;
808f4dfe 6330 dump_to_pp (&pp, simple);
6331 pp_flush (&pp);
6332}
6333
808f4dfe 6334/* Dump a multiline representation of this merger to stderr. */
6335
6336DEBUG_FUNCTION void
808f4dfe 6337model_merger::dump (bool simple) const
757bf1df 6338{
808f4dfe 6339 dump (stderr, simple);
6340}
6341
6342/* Return true if it's OK to merge SVAL with other svalues. */
6343
6344bool
6345model_merger::mergeable_svalue_p (const svalue *sval) const
6346{
6347 if (m_ext_state)
6348 {
6349 /* Reject merging svalues that have non-purgable sm-state,
6350 to avoid falsely reporting memory leaks by merging them
6351 with something else. For example, given a local var "p",
6352 reject the merger of a:
6353 store_a mapping "p" to a malloc-ed ptr
6354 with:
6355 store_b mapping "p" to a NULL ptr. */
6356 if (m_state_a)
6357 if (!m_state_a->can_purge_p (*m_ext_state, sval))
6358 return false;
6359 if (m_state_b)
6360 if (!m_state_b->can_purge_p (*m_ext_state, sval))
6361 return false;
6362 }
6363 return true;
6364}
6365
6366} // namespace ana
6367
808f4dfe 6368/* Dump RMODEL fully to stderr (i.e. without summarization). */
757bf1df 6369
6370DEBUG_FUNCTION void
6371debug (const region_model &rmodel)
757bf1df 6372{
808f4dfe 6373 rmodel.dump (false);
6374}
6375
8ca7fa84 6376/* class rejected_op_constraint : public rejected_constraint. */
6377
6378void
8ca7fa84 6379rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
6380{
6381 region_model m (m_model);
6382 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
6383 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
6384 lhs_sval->dump_to_pp (pp, true);
6385 pp_printf (pp, " %s ", op_symbol_code (m_op));
6386 rhs_sval->dump_to_pp (pp, true);
6387}
6388
6389/* class rejected_default_case : public rejected_constraint. */
6390
6391void
6392rejected_default_case::dump_to_pp (pretty_printer *pp) const
6393{
6394 pp_string (pp, "implicit default for enum");
6395}
6396
6397/* class rejected_ranges_constraint : public rejected_constraint. */
6398
6399void
6400rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
6401{
6402 region_model m (m_model);
6403 const svalue *sval = m.get_rvalue (m_expr, NULL);
6404 sval->dump_to_pp (pp, true);
6405 pp_string (pp, " in ");
6406 m_ranges->dump_to_pp (pp, true);
6407}
6408
808f4dfe 6409/* class engine. */
757bf1df 6410
6411/* engine's ctor. */
6412
6413engine::engine (const supergraph *sg, logger *logger)
6414: m_sg (sg), m_mgr (logger)
6415{
6416}
6417
808f4dfe 6418/* Dump the managed objects by class to LOGGER, and the per-class totals. */
757bf1df 6419
6420void
6421engine::log_stats (logger *logger) const
757bf1df 6422{
808f4dfe 6423 m_mgr.log_stats (logger, true);
6424}
6425
6426namespace ana {
6427
6428#if CHECKING_P
6429
6430namespace selftest {
6431
6432/* Build a REAL_CST of the given type from STR. */
6433
6434static tree
6435build_real_cst_from_string (tree type, const char *str)
6436{
6437 REAL_VALUE_TYPE real;
6438 real_from_string (&real, str);
6439 return build_real (type, real);
6440}
6441
6442/* Append various "interesting" constants to OUT (e.g. NaN). */
6443
6444static void
6445append_interesting_constants (auto_vec<tree> *out)
6446{
6447 out->safe_push (build_int_cst (integer_type_node, 0));
6448 out->safe_push (build_int_cst (integer_type_node, 42));
6449 out->safe_push (build_int_cst (unsigned_type_node, 0));
6450 out->safe_push (build_int_cst (unsigned_type_node, 42));
6451 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
6452 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
6453 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
6454 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
6455 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
6456 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
6457 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
6458 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
6459}
6460
6461/* Verify that tree_cmp is a well-behaved comparator for qsort, even
6462 if the underlying constants aren't comparable. */
6463
6464static void
6465test_tree_cmp_on_constants ()
6466{
6467 auto_vec<tree> csts;
6468 append_interesting_constants (&csts);
6469
6470 /* Try sorting every triple. */
6471 const unsigned num = csts.length ();
6472 for (unsigned i = 0; i < num; i++)
6473 for (unsigned j = 0; j < num; j++)
6474 for (unsigned k = 0; k < num; k++)
6475 {
6476 auto_vec<tree> v (3);
6477 v.quick_push (csts[i]);
6478 v.quick_push (csts[j]);
6479 v.quick_push (csts[k]);
6480 v.qsort (tree_cmp);
6481 }
6482}
6483
6484/* Implementation detail of the ASSERT_CONDITION_* macros. */
6485
6486void
6487assert_condition (const location &loc,
6488 region_model &model,
6489 const svalue *lhs, tree_code op, const svalue *rhs,
6490 tristate expected)
6491{
6492 tristate actual = model.eval_condition (lhs, op, rhs);
6493 ASSERT_EQ_AT (loc, actual, expected);
6494}
6495
6496/* Implementation detail of the ASSERT_CONDITION_* macros. */
6497
6498void
6499assert_condition (const location &loc,
6500 region_model &model,
6501 tree lhs, tree_code op, tree rhs,
6502 tristate expected)
6503{
6504 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
6505 ASSERT_EQ_AT (loc, actual, expected);
6506}
6507
6508/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
6509
6510static void
6511assert_dump_tree_eq (const location &loc, tree t, const char *expected)
6512{
6513 auto_fix_quotes sentinel;
6514 pretty_printer pp;
6515 pp_format_decoder (&pp) = default_tree_printer;
6516 dump_tree (&pp, t);
6517 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6518}
6519
6520/* Assert that dump_tree (T) is EXPECTED. */
6521
6522#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
6523 SELFTEST_BEGIN_STMT \
6524 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
6525 SELFTEST_END_STMT
6526
6527/* Implementation detail of ASSERT_DUMP_EQ. */
6528
6529static void
6530assert_dump_eq (const location &loc,
6531 const region_model &model,
6532 bool summarize,
6533 const char *expected)
6534{
6535 auto_fix_quotes sentinel;
6536 pretty_printer pp;
6537 pp_format_decoder (&pp) = default_tree_printer;
6538
6539 model.dump_to_pp (&pp, summarize, true);
6540 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
6541}
6542
6543/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
6544
6545#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
6546 SELFTEST_BEGIN_STMT \
6547 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
6548 SELFTEST_END_STMT
6549
6550/* Smoketest for region_model::dump_to_pp. */
6551
6552static void
6553test_dump ()
6554{
6555 region_model_manager mgr;
6556 region_model model (&mgr);
6557
6558 ASSERT_DUMP_EQ (model, false,
6559 "stack depth: 0\n"
6560 "m_called_unknown_fn: FALSE\n"
6561 "constraint_manager:\n"
6562 " equiv classes:\n"
6563 " constraints:\n");
6564 ASSERT_DUMP_EQ (model, true,
6565 "stack depth: 0\n"
6566 "m_called_unknown_fn: FALSE\n"
6567 "constraint_manager:\n"
6568 " equiv classes:\n"
6569 " constraints:\n");
6570}
6571
6572/* Helper function for selftests. Create a struct or union type named NAME,
6573 with the fields given by the FIELD_DECLS in FIELDS.
6574 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
6575 create a UNION_TYPE. */
6576
6577static tree
6578make_test_compound_type (const char *name, bool is_struct,
6579 const auto_vec<tree> *fields)
6580{
6581 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
6582 TYPE_NAME (t) = get_identifier (name);
6583 TYPE_SIZE (t) = 0;
6584
6585 tree fieldlist = NULL;
6586 int i;
6587 tree field;
6588 FOR_EACH_VEC_ELT (*fields, i, field)
6589 {
6590 gcc_assert (TREE_CODE (field) == FIELD_DECL);
6591 DECL_CONTEXT (field) = t;
6592 fieldlist = chainon (field, fieldlist);
6593 }
6594 fieldlist = nreverse (fieldlist);
6595 TYPE_FIELDS (t) = fieldlist;
6596
6597 layout_type (t);
6598 return t;
6599}
6600
a96f1c38
DM
6601/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
6602
6603struct coord_test
6604{
6605 coord_test ()
6606 {
6607 auto_vec<tree> fields;
6608 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6609 get_identifier ("x"), integer_type_node);
6610 fields.safe_push (m_x_field);
6611 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
6612 get_identifier ("y"), integer_type_node);
6613 fields.safe_push (m_y_field);
6614 m_coord_type = make_test_compound_type ("coord", true, &fields);
6615 }
6616
6617 tree m_x_field;
6618 tree m_y_field;
6619 tree m_coord_type;
6620};
6621
808f4dfe 6622/* Verify usage of a struct. */
6623
6624static void
808f4dfe 6625test_struct ()
884d9141 6626{
a96f1c38
DM
6627 coord_test ct;
6628
6629 tree c = build_global_decl ("c", ct.m_coord_type);
6630 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6631 c, ct.m_x_field, NULL_TREE);
6632 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6633 c, ct.m_y_field, NULL_TREE);
6634
6635 tree int_17 = build_int_cst (integer_type_node, 17);
6636 tree int_m3 = build_int_cst (integer_type_node, -3);
6637
6638 region_model_manager mgr;
6639 region_model model (&mgr);
6640 model.set_value (c_x, int_17, NULL);
6641 model.set_value (c_y, int_m3, NULL);
6642
6643 /* Verify get_offset for "c.x". */
6644 {
6645 const region *c_x_reg = model.get_lvalue (c_x, NULL);
7a6564c9 6646 region_offset offset = c_x_reg->get_offset (&mgr);
6647 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6648 ASSERT_EQ (offset.get_bit_offset (), 0);
6649 }
6650
6651 /* Verify get_offset for "c.y". */
6652 {
6653 const region *c_y_reg = model.get_lvalue (c_y, NULL);
7a6564c9 6654 region_offset offset = c_y_reg->get_offset (&mgr);
6655 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
6656 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
6657 }
6658}
6659
808f4dfe 6660/* Verify usage of an array element. */
6661
6662static void
808f4dfe 6663test_array_1 ()
884d9141
DM
6664{
6665 tree tlen = size_int (10);
6666 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6667
6668 tree a = build_global_decl ("a", arr_type);
6669
6670 region_model_manager mgr;
6671 region_model model (&mgr);
6672 tree int_0 = build_int_cst (integer_type_node, 0);
6673 tree a_0 = build4 (ARRAY_REF, char_type_node,
6674 a, int_0, NULL_TREE, NULL_TREE);
6675 tree char_A = build_int_cst (char_type_node, 'A');
6676 model.set_value (a_0, char_A, NULL);
6677}
6678
6679/* Verify that region_model::get_representative_tree works as expected. */
6680
6681static void
6682test_get_representative_tree ()
6683{
6684 region_model_manager mgr;
6685
6686 /* STRING_CST. */
6687 {
6688 tree string_cst = build_string (4, "foo");
6689 region_model m (&mgr);
6690 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
6691 tree rep = m.get_representative_tree (str_sval);
6692 ASSERT_EQ (rep, string_cst);
6693 }
6694
6695 /* String literal. */
6696 {
6697 tree string_cst_ptr = build_string_literal (4, "foo");
6698 region_model m (&mgr);
6699 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
6700 tree rep = m.get_representative_tree (str_sval);
6701 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
6702 }
6703
6704 /* Value of an element within an array. */
6705 {
6706 tree tlen = size_int (10);
6707 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
6708 tree a = build_global_decl ("a", arr_type);
6709 placeholder_svalue test_sval (mgr.alloc_symbol_id (),
6710 char_type_node, "test value");
6711
6712 /* Value of a[3]. */
6713 {
6714 test_region_model_context ctxt;
6715 region_model model (&mgr);
6716 tree int_3 = build_int_cst (integer_type_node, 3);
6717 tree a_3 = build4 (ARRAY_REF, char_type_node,
6718 a, int_3, NULL_TREE, NULL_TREE);
6719 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
6720 model.set_value (a_3_reg, &test_sval, &ctxt);
6721 tree rep = model.get_representative_tree (&test_sval);
6722 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
6723 }
6724
6725 /* Value of a[0]. */
6726 {
6727 test_region_model_context ctxt;
6728 region_model model (&mgr);
6729 tree idx = build_int_cst (integer_type_node, 0);
6730 tree a_0 = build4 (ARRAY_REF, char_type_node,
6731 a, idx, NULL_TREE, NULL_TREE);
6732 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
6733 model.set_value (a_0_reg, &test_sval, &ctxt);
6734 tree rep = model.get_representative_tree (&test_sval);
6735 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
6736 }
6737 }
6738
6739 /* Value of a field within a struct. */
6740 {
6741 coord_test ct;
6742
6743 tree c = build_global_decl ("c", ct.m_coord_type);
6744 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
6745 c, ct.m_x_field, NULL_TREE);
6746 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
6747 c, ct.m_y_field, NULL_TREE);
6748
6749 test_region_model_context ctxt;
6750
6751 /* Value of initial field. */
6752 {
6753 region_model m (&mgr);
6754 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
6755 placeholder_svalue test_sval_x (mgr.alloc_symbol_id (),
6756 integer_type_node, "test x val");
6757 m.set_value (c_x_reg, &test_sval_x, &ctxt);
6758 tree rep = m.get_representative_tree (&test_sval_x);
6759 ASSERT_DUMP_TREE_EQ (rep, "c.x");
6760 }
6761
6762 /* Value of non-initial field. */
6763 {
6764 region_model m (&mgr);
6765 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
6766 placeholder_svalue test_sval_y (mgr.alloc_symbol_id (),
6767 integer_type_node, "test y val");
6768 m.set_value (c_y_reg, &test_sval_y, &ctxt);
6769 tree rep = m.get_representative_tree (&test_sval_y);
6770 ASSERT_DUMP_TREE_EQ (rep, "c.y");
6771 }
6772 }
90f7c300
DM
6773}

/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *. */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
             model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42"... */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue. */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal. */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}

/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton. */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue. */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues. */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue. */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}

/* Verify that initial_svalues are handled as expected. */
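/* (An initial_svalue is the symbolic value that a region holds at the
   start of the analysis path; distinct regions get distinct initial
   values.)  */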

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
}

/* Verify that unary ops are folded as expected. */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x". */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x". */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y". */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
                                      x_init, y_init));
  /* "!(x > y)" -> "x <= y". */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
                                      x_init, y_init));
}

/* Verify that binops on constant svalues are folded. */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }
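
  /* For each pair of constants, check that addition, subtraction,
     multiplication and equality/inequality fold to the expected
     constant svalue (where the result fits in the table).  */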
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
        if (i != j)
          ASSERT_NE (cst_sval[i], cst_sval[j]);
        if (i + j < NUM_CSTS)
          {
            const svalue *sum
              = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (sum, cst_sval[i + j]);
          }
        if (i - j >= 0)
          {
            const svalue *difference
              = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (difference, cst_sval[i - j]);
          }
        if (i * j < NUM_CSTS)
          {
            const svalue *product
              = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (product, cst_sval[i * j]);
          }
        const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
                                                    cst_sval[i], cst_sval[j]);
        ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
        const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
                                                     cst_sval[i], cst_sval[j]);
        ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
        // etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding. */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding. */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS. */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2). */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans. */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (mgr.alloc_symbol_id (),
                                               boolean_type_node, "v");
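    /* A known "true" dominates an OR and a known "false" is its
       identity (so the symbolic operand survives); AND is the dual:
       "false" dominates and "true" is the identity.  */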
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_true);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, &sval_placeholder),
                   &sval_placeholder);
      }
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_false);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, &sval_placeholder),
                   &sval_placeholder);
      }
  }
}

/* Verify that sub_svalues are folded as expected. */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply yields an unknown. */
  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
                                                    unknown, c_x_reg);
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}

/* Get BIT within VAL as a symbolic value within MGR. */

static const svalue *
get_bit (region_model_manager *mgr,
         bit_offset_t bit,
         unsigned HOST_WIDE_INT val)
{
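  /* Wrap VAL as a constant svalue, then pick out the single bit at
     position BIT of it via a 1-bit-wide bits_within_svalue, typed as
     boolean.  */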
  const svalue *inner_svalue
    = mgr->get_or_create_int_cst (unsigned_type_node, val);
  return mgr->get_or_create_bits_within (boolean_type_node,
                                         bit_range (bit, 1),
                                         inner_svalue);
}

/* Verify that bits_within_svalues are folded as expected. */

static void
test_bits_within_svalue_folding ()
{
  region_model_manager mgr;

  const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
  const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);

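  /* Check the folding of each bit of various 16-bit patterns.  */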
  {
    const unsigned val = 0x0000;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  {
    const unsigned val = 0x0001;
    ASSERT_EQ (get_bit (&mgr, 0, val), one);
    for (unsigned bit = 1; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
  }

  {
    const unsigned val = 0x8000;
    for (unsigned bit = 0; bit < 15; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), zero);
    ASSERT_EQ (get_bit (&mgr, 15, val), one);
  }

  {
    const unsigned val = 0xFFFF;
    for (unsigned bit = 0; bit < 16; bit++)
      ASSERT_EQ (get_bit (&mgr, bit, val), one);
  }
}

/* Test that region::descendent_of_p works as expected. */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself. */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region. */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}

/* Verify that bit_range_region works as expected. */

static void
test_bit_range_regions ()
{
  tree x = build_global_decl ("x", integer_type_node);
  region_model_manager mgr;
  const region *x_reg = mgr.get_region_for_global (x);
  const region *byte0
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
  const region *byte1
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
  ASSERT_TRUE (byte0->descendent_of_p (x_reg));
  ASSERT_TRUE (byte1->descendent_of_p (x_reg));
  ASSERT_NE (byte0, byte1);
}

/* Verify that simple assignments work as expected. */

static void
test_assignment ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* "x == 0", then use of y, then "y = 0;". */
  region_model_manager mgr;
  region_model model (&mgr);
  ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
  ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
  model.set_value (model.get_lvalue (y, NULL),
                   model.get_rvalue (int_0, NULL),
                   NULL);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
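  /* Given "x == 0" and "y == 0", the model should also be able to
     conclude "y == x". */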
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
}

/* Verify that compound assignments work as expected. */

static void
test_compound_assignment ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);
  tree d = build_global_decl ("d", ct.m_coord_type);
  tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     d, ct.m_x_field, NULL_TREE);
  tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     d, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Copy c to d. */
  const svalue *sval = model.get_rvalue (c, NULL);
  model.set_value (model.get_lvalue (d, NULL), sval, NULL);

  /* Check that the fields have the same svalues. */
  ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
  ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
}

/* Verify the details of pushing and popping stack frames. */
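/* A rough sketch of the scenario simulated below (hypothetical C):

     int *p;   // global
     int **q;  // global
     int child_fn (int x, int y) { p = &x; q = &p; ... }
     int parent_fn (int a, int b) { ... calls child_fn ... }

   After the child frame is popped, "p" points into a dead frame and
   should be poisoned, whereas "q" still validly points at "p".  */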

static void
test_stack_frames ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = build_int_cst (integer_type_node, 0);

  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
                                    "parent_fn",
                                    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
                                   "child_fn",
                                   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame. */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("a"),
                       integer_type_node);
  DECL_CONTEXT (a) = parent_fndecl;
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("b"),
                       integer_type_node);
  DECL_CONTEXT (b) = parent_fndecl;
  /* "x" and "y" in a child frame. */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("x"),
                       integer_type_node);
  DECL_CONTEXT (x) = child_fndecl;
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("y"),
                       integer_type_node);
  DECL_CONTEXT (y) = child_fndecl;

  /* "p" global. */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global. */
  tree q = build_global_decl ("q", ptr_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame for "parent_fn". */
  const region *parent_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
                        NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
  const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_reg,
                   model.get_rvalue (int_42, &ctxt),
                   &ctxt);
  ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);

  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
             tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn". */
  const region *child_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
                   model.get_rvalue (int_0, &ctxt),
                   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
             tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame: p = &x. */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
                   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
                   &ctxt);
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);

  /* Point another global pointer at p: q = &p. */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
                   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
                   &ctxt);

  /* Test region::descendent_of_p. */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack. */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned. */
  const svalue *new_p_sval = model.get_rvalue (p, NULL);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
             POISON_KIND_POPPED_STACK);

  /* Verify that q still points to p, in spite of the region
     renumbering. */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->maybe_get_region (),
             model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated. */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame. */
  /* Verify "a" still has its value. */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
             int_42);
  /* Verify "b" still has its constraint. */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
             tristate (tristate::TS_TRUE));
}

/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack. */

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
                             "factorial",
                             param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n". */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("n"),
                       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
        = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth. */
  for (int depth = 0; depth < 5; depth++)
    {
      {
        svalue_set visited;
        ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
                                                      &visited),
                   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
         not just the top. */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
                 parm_regs[depth]);
      /* ...and that we can locate the svalues. */
      {
        svalue_set visited;
        ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
                                                      &visited),
                   path_var (n, depth + 1));
      }
    }
}

/* Ensure that region_model::operator== works as expected. */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other. */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal. */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor. */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model. */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}

/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal. */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
                    model0.get_rvalue (int_42, NULL),
                    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
                    model0.get_rvalue (int_113, NULL),
                    NULL);

  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
                    model1.get_rvalue (int_113, NULL),
                    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
                    model1.get_rvalue (int_42, NULL),
                    NULL);

  ASSERT_EQ (model0, model1);
}

/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization. */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}

/* Verify that we can canonicalize a model containing NaN and other real
   constants. */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}

/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergeable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model. */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
                            region_model *out_merged_model,
                            const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
                      model0.get_rvalue (val_a, &ctxt),
                      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
                      model1.get_rvalue (val_b, &ctxt),
                      &ctxt);

  /* They should be mergeable. */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}

/* Verify that we can merge region_model instances. */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a". */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("a"),
                       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer. */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
                       get_identifier ("q"),
                       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged. */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global. */
  /* TODO: verify that the merged model doesn't have a value for
     the global. */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
                                  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value. */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
                                  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value. */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values. */

  /* Two equal constant values. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
               int_42);
  }

  /* Two non-equal constant values. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant. */
  // TODO

  /* Pointers: NULL and NULL. */
  // TODO

  /* Pointers: NULL and non-NULL. */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local. */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
                      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global. */
  {
    region_model merged (&mgr);
    /* p == &y in both input models. */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
                                &merged_p_sval);

    /* We should get p == &y in the merged model. */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown. */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int". */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
                                &merged_x_sval);

    /* We should get x == unknown in the merged model. */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
                      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical. */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (mgr.alloc_symbol_id (),
                                         integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
                      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y. */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
               tristate (tristate::TS_TRUE));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
         - pairs of regions
   */

  /* Views. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame. */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame. */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame. */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame. */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least). */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global. */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
                      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least). */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances. */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n. */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n). */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n). */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
             tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
             tristate (tristate::TS_TRUE));

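  /* Constraints present in only one of the input models ("z != 5" from
     model1, "x == y" from model0) should have been dropped by the
     merger.  */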
  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
             tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
             tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected. */

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
                                         int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
             widening_svalue::DIR_ASCENDING);
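
  /* The widened value was 0 and then 1, i.e. ascending from 0:
     conditions that fail even for the starting value are known false,
     conditions that hold for every value >= 0 are known true, and
     everything else is unknown.  */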
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
             tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
             tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
             tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
             tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
             tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
        [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
       T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
       F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0. */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1. */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value. */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256. */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
             tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
             tristate (tristate::TS_TRUE));

  /* Try merging with the initial state. */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges. */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case. */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
             tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged. */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;" */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state. */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again. */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL. */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

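  /* Constraining "p" to be non-NULL should also constrain "q", since
     both point at the same heap-allocated region.  */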
  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable. */

static void
test_var ()
{
  /* "int i;" */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value". */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value". */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;". */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
             model.get_rvalue (int_17, NULL));

  /* "i = -3;". */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
             model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i". */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

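/* Smoketest of getting and setting values within an array, including
   via a symbolic index, and of get_offset for array elements.  */
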
static void
test_array_2 ()
{
  /* "int arr[10];" */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;" */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
                       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
                       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
                       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;". */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;". */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;". */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]". */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]". */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]". */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings. */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding. */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}
8288
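/* A hypothetical extension of the offset checks in test_array_2 (an
   illustrative sketch, not in the original file and not registered
   below): by the same arithmetic used for "arr[1]", the bit offset of
   "arr[2]" should be twice INT_TYPE_SIZE.  */

static void
test_array_element_offset ()
{
  /* "int arr[10];" */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);
  tree int_2 = build_int_cst (integer_type_node, 2);
  tree arr_2 = build4 (ARRAY_REF, integer_type_node,
                       arr, int_2, NULL_TREE, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *arr_2_reg = model.get_lvalue (arr_2, NULL);
  region_offset offset = arr_2_reg->get_offset (&mgr);
  ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
  ASSERT_EQ (offset.get_bit_offset (), 2 * INT_TYPE_SIZE);
}
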
/* Smoketest of dereferencing a pointer via MEM_REF. */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
    *p;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;". */
  model.set_value (x, int_17, NULL);

  /* "p = &x;". */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

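/* A hypothetical converse of test_mem_ref (an illustrative sketch, not
   in the original file and not registered below): writing through "*p"
   should update "x", the region that "p" points to.  */

static void
test_mem_ref_write ()
{
  /*
    p = &x;
    *p = 17;
    x;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  /* "*p = 17;".  */
  model.set_value (star_p, int_17, NULL);

  /* Reading "x" directly should now give the constant.  */
  ASSERT_EQ (model.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
}
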
/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
                         pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works. */

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
                           n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);". */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

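/* A hypothetical follow-on to test_malloc (an illustrative sketch, not
   in the original file and not registered below), assuming the
   RK_HEAP_ALLOCATED and SK_REGION enumerators: a heap allocation
   should yield a heap-allocated region, and taking its address should
   yield a region_svalue.  */

static void
test_malloc_kinds ()
{
  tree int_star = build_pointer_type (integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  const svalue *size_sval
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  ASSERT_EQ (reg->get_kind (), RK_HEAP_ALLOCATED);

  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  ASSERT_EQ (ptr->get_kind (), SK_REGION);
}
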
/* Verify that alloca works. */

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
                             "test_fn",
                             param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
                           n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame. */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
                        NULL, &ctxt);
  /* "p = alloca (n * 4);". */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped. */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works. */

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

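/* A hypothetical extension of test_involves_p (an illustrative sketch,
   not in the original file and not registered below), assuming
   region_model_manager::get_or_create_binop behaves as in the folding
   tests listed in the runner below: a binop svalue built from the
   initial values of "p" and "q" ought to involve both operands.  */

static void
test_involves_p_binop ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  /* "p - q" as a symbolic binop.  */
  const svalue *diff
    = mgr.get_or_create_binop (ssizetype, MINUS_EXPR, p_init, q_init);
  ASSERT_TRUE (diff->involves_p (p_init));
  ASSERT_TRUE (diff->involves_p (q_init));
}
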
/* Run all of the selftests within this file. */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */