/* Classes for modeling the state of memory.
   Copyright (C) 2019-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#define INCLUDE_ALGORITHM
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "options.h"
#include "cgraph.h"
#include "cfg.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"
#include "analyzer/checker-event.h"
#include "analyzer/checker-path.h"
#include "analyzer/feasible-graph.h"
#include "analyzer/record-layout.h"
#include "diagnostic-format-sarif.h"

#if ENABLE_ANALYZER

namespace ana {

auto_vec<pop_frame_callback> region_model::pop_frame_callbacks;

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  if (!t)
    return;
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
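
/* Illustrative usage (ours, not from the GCC sources): inside a handler
   that is itself being driven by pp_printf, prefer

     print_quoted_type (pp, TREE_TYPE (expr));

   over pp_printf (pp, "%qT", TREE_TYPE (expr)), to avoid the pp_printf
   reentrancy problem described above.  */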

/* Print EXPR to PP, without quotes.
   For use within svalue::maybe_print_for_user
   and region::maybe_print_for_user.  */

void
print_expr_for_user (pretty_printer *pp, tree expr)
{
  /* Workaround for C++'s lang_hooks.decl_printable_name,
     which unhelpfully (for us) prefixes the decl with its
     type.  */
  if (DECL_P (expr))
    dump_generic_node (pp, expr, 0, TDF_SLIM, 0);
  else
    pp_printf (pp, "%E", expr);
}

/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
	return false;
      if (sval != *other_slot)
	return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
				 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (i > 0)
	pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Generate a JSON value for this region_to_value_map.
   This is intended for debugging the analyzer rather than
   serialization.  */

json::object *
region_to_value_map::to_json () const
{
  json::object *map_obj = new json::object ();

  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);

  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      label_text reg_desc = reg->get_desc ();
      const svalue *sval = *get (reg);
      map_obj->set (reg_desc.get (), sval->to_json ());
    }

  return map_obj;
}

/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
				       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
	{
	  if (iter_sval == *other_slot)
	    out->put (iter_reg, iter_sval);
	  else
	    return false;
	}
    }
  return true;
}
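
/* Illustrative sketch (ours) of the intersection semantics above: given
     THIS:  {r1: sv_a, r2: sv_b}
     OTHER: {r1: sv_a, r2: sv_c}
   the merge is rejected, since r2 maps to different values; whereas
     THIS:  {r1: sv_a, r2: sv_b}
     OTHER: {r1: sv_a}
   merges successfully, with OUT == {r1: sv_a}.  */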

/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
	to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Generate a JSON value for this region_model.
   This is intended for debugging the analyzer rather than
   serialization.  */

json::object *
region_model::to_json () const
{
  json::object *model_obj = new json::object ();
  model_obj->set ("store", m_store.to_json ());
  model_obj->set ("constraints", m_constraints->to_json ());
  if (m_current_frame)
    model_obj->set ("current_frame", m_current_frame->to_json ());
  model_obj->set ("dynamic_extents", m_dynamic_extents.to_json ());
  return model_obj;
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
			     const region *src_region,
			     tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
	    && m_pkind == other.m_pkind
	    && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
      case POISON_KIND_DELETED:
	return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
	return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool terminate_path_p () const final override { return true; }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	{
	  ctxt.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
	  return ctxt.warn ("use of uninitialized value %qE",
			    m_expr);
	}
	break;
      case POISON_KIND_FREED:
	{
	  ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return ctxt.warn ("use after %<free%> of %qE",
			    m_expr);
	}
	break;
      case POISON_KIND_DELETED:
	{
	  ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return ctxt.warn ("use after %<delete%> of %qE",
			    m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return ctxt.warn
	    ("dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_UNINIT:
	return ev.formatted_print ("use of uninitialized value %qE here",
				   m_expr);
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_DELETED:
	return ev.formatted_print ("use after %<delete%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode,
			    const gimple *emission_stmt)
    const final override
  {
    if (!m_check_expr)
      return true;

    /* We've reached the enode, but not necessarily the right function_point.
       Try to get the state at the correct stmt.  */
    region_model emission_model (fnode.get_model ().get_manager ());
    if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
      /* Couldn't get state; accept this diagnostic.  */
      return true;

    const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULL);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue *fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
  tree m_check_expr;
};
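
/* Illustrative examples (ours, not from the GCC testsuite) of code that
   can produce the poison kinds handled above, under -fanalyzer:

     int use_uninit (void) { int i; return i; }      // POISON_KIND_UNINIT

     int use_after_free (int *p)
     {
       free (p);
       return *p;                                    // POISON_KIND_FREED
     }

     int *escape (void) { int x = 42; return &x; }
     int use_stale (void) { return *escape (); }     // POISON_KIND_POPPED_STACK
*/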

/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
			       m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
				   int operand_precision,
				   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
	    && m_operand_precision == other.m_operand_precision
	    && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by count (%qE) >= precision of type (%qi)",
		      m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
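
/* Illustrative examples (ours) of code triggering the two shift
   diagnostics above, under -fanalyzer:

     int f (int x) { return x << -2; }  // -Wanalyzer-shift-count-negative
     int g (int x) { return x << 40; }  // -Wanalyzer-shift-count-overflow,
					// assuming 32-bit int
*/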

/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);

  if (gimple_has_volatile_ops (assign)
      && !gimple_clobber_p (assign))
    {
      conjured_purge p (this, ctxt);
      return m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs),
						   assign,
						   get_lvalue (lhs, ctxt),
						   p);
    }

  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL: /* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
	  {
	    /* "INT34-C. Do not shift an expression by a negative number of
	       bits or by greater than or equal to the number of bits that
	       exist in the operand."  */
	    if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
	      if (TREE_CODE (rhs2_cst) == INTEGER_CST
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
		{
		  if (tree_int_cst_sgn (rhs2_cst) < 0)
		    ctxt->warn
		      (make_unique<shift_count_negative_diagnostic>
			 (assign, rhs2_cst));
		  else if (compare_tree_int (rhs2_cst,
					     TYPE_PRECISION (TREE_TYPE (rhs1)))
			   >= 0)
		    ctxt->warn
		      (make_unique<shift_count_overflow_diagnostic>
			 (assign,
			  int (TYPE_PRECISION (TREE_TYPE (rhs1))),
			  rhs2_cst));
		}
	  }

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B,
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
			       const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
	&& gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
	return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
	return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
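
/* Illustrative example (ours): with any optimization enabled, C code like

     int c;
     if (flag)
       c = get_value ();
     if ((flag == 0 || c == 42) || other)
       ...

   can have its inner OR-IF folded so that "c == 42" is evaluated
   unconditionally via a BIT_IOR_EXPR temporary, even on the path where
   "flag" is zero and "c" is uninitialized; the predicate above detects
   that situation so the resulting report can be suppressed.  */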

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;".

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of an IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}
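
/* Illustrative example (ours): compiling

     int len;
     get_len (&len);

   with -ftrivial-auto-var-init=zero gimplifies the padding of "len" to

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   and the predicate above stops "len = _1;" itself from being reported,
   while genuine uses of an uninitialized "len" are still diagnosed.  */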

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
				tree expr,
				const region *src_region,
				region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
	 to initialize.  */
      if (pkind == POISON_KIND_UNINIT
	  && sval->get_type ()
	  && is_empty_type (sval->get_type ()))
	return sval;

      if (pkind == POISON_KIND_UNINIT)
	if (const gimple *curr_stmt = ctxt->get_stmt ())
	  if (const gassign *assign_stmt
		= dyn_cast <const gassign *> (curr_stmt))
	    {
	      /* Special case to avoid certain false positives.  */
	      if (within_short_circuited_stmt_p (this, assign_stmt))
		return sval;

	      /* Special case to avoid false positive on
		 -ftrivial-auto-var-init=.  */
	      if (due_to_ifn_deferred_init_p (assign_stmt))
		return sval;
	    }

      /* If we have an SSA name for a temporary, we don't want to print
	 '<unknown>'.
	 Poisoned values are shared by type, and so we can't reconstruct
	 the tree other than via the def stmts, using
	 fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
	src_region = get_region_for_poisoned_expr (expr);

      /* Can we reliably get the poisoned value from "expr"?
	 This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
	 Unfortunately, we might not have a reliable value for EXPR.
	 Hence we only query its value now, and only use it if we get the
	 poisoned value back again.  */
      tree check_expr = expr;
      const svalue *foo_sval = get_rvalue (expr, NULL);
      if (foo_sval == sval)
	check_expr = expr;
      else
	check_expr = NULL;
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
							      pkind,
							      src_region,
							      check_expr)))
	{
	  /* We only want to report use of a poisoned value at the first
	     place it gets used; return an unknown value to avoid generating
	     a chain of followup warnings.  */
	  sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
	}

      return sval;
    }

  return sval;
}

/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
	expr = decl;
      else
	return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Any writes other than to the stack are treated
     as externally visible.  */
  if (ctxt)
    {
      enum memory_space memspace = lhs_reg->get_memory_space ();
      if (memspace != MEMSPACE_STACK)
	ctxt->maybe_did_work ();
    }

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg, ctxt);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
			   ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
			   bool *out_unknown_side_effects,
			   region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_DEBUG:
      /* We should have stripped these out when building the supergraph.  */
      gcc_unreachable ();
      break;

    case GIMPLE_ASSIGN:
      {
	const gassign *assign = as_a <const gassign *> (stmt);
	on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
	const gasm *asm_stmt = as_a <const gasm *> (stmt);
	on_asm_stmt (asm_stmt, ctxt);
	if (ctxt)
	  ctxt->maybe_did_work ();
      }
      break;

    case GIMPLE_CALL:
      {
	/* Track whether we have a gcall to a function that's not recognized
	   by anything, for which we don't have a function body, or for which
	   we don't know the fndecl.  */
	const gcall *call = as_a <const gcall *> (stmt);
	*out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
	const greturn *return_ = as_a <const greturn *> (stmt);
	on_return (return_, ctxt);
      }
      break;
    }
}

/* Given a call CD with function attribute FORMAT_ATTR, check that the
   format arg to the call is a valid null-terminated string.  */

void
region_model::check_call_format_attr (const call_details &cd,
				      tree format_attr) const
{
  /* We assume that FORMAT_ATTR has already been validated.  */

  /* arg0 of the attribute should be the kind of format string
     that this function expects (e.g. "printf").  */
  const tree arg0_tree_list = TREE_VALUE (format_attr);
  if (!arg0_tree_list)
    return;

  /* arg1 of the attribute should be the 1-based parameter index
     to treat as the format string.  */
  const tree arg1_tree_list = TREE_CHAIN (arg0_tree_list);
  if (!arg1_tree_list)
    return;
  const tree arg1_value = TREE_VALUE (arg1_tree_list);
  if (!arg1_value)
    return;

  unsigned format_arg_idx = TREE_INT_CST_LOW (arg1_value) - 1;
  if (cd.num_args () <= format_arg_idx)
    return;

  /* Subclass of annotating_context that
     adds a note about the format attr to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
		     unsigned fmt_param_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_fmt_param_idx (fmt_param_idx)
    {
    }
    void add_annotations () final override
    {
      class reason_format_attr
	: public pending_note_subclass<reason_format_attr>
      {
      public:
	reason_format_attr (const call_arg_details &arg_details)
	: m_arg_details (arg_details)
	{
	}

	const char *get_kind () const final override
	{
	  return "reason_format_attr";
	}

	void emit () const final override
	{
	  inform (DECL_SOURCE_LOCATION (m_arg_details.m_called_fndecl),
		  "parameter %i of %qD marked as a format string"
		  " via %qs attribute",
		  m_arg_details.m_arg_idx + 1, m_arg_details.m_called_fndecl,
		  "format");
	}

	bool operator== (const reason_format_attr &other) const
	{
	  return m_arg_details == other.m_arg_details;
	}

      private:
	call_arg_details m_arg_details;
      };

      call_arg_details arg_details (m_cd, m_fmt_param_idx);
      add_note (make_unique<reason_format_attr> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_fmt_param_idx;
  };

  annotating_ctxt my_ctxt (cd, format_arg_idx);
  call_details my_cd (cd, &my_ctxt);
  my_cd.check_for_null_terminated_string_arg (format_arg_idx);
}
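
/* Illustrative example (ours): given a declaration such as

     extern void log_msg (int level, const char *fmt, ...)
       __attribute__ ((format (printf, 2, 3)));

   arg1 of the attribute is 2, so format_arg_idx above is 1, and each
   call's second argument is checked for being a valid null-terminated
   string, with the attribute noted in any resulting diagnostic.  */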

/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.

   Check that calls to functions with "format" attribute have valid
   null-terminated strings for their format argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);

  /* Handle attribute "format".  */
  if (tree format_attr = cd.lookup_function_attribute ("format"))
    check_call_format_attr (cd, format_attr);
}

/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
					 int retval,
					 bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
				      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.
   Specifically, assign an svalue to the LHS, and add a constraint that
   that svalue is non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  cd.set_any_lhs_with_defaults ();
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
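
/* Minimal sketch (ours; "kf_foo" and its bifurcation strategy are
   hypothetical, not an actual GCC known_function) of how a
   known_function for a may-fail API could use the helpers above,
   modelling the failure outcome in one program_state and the success
   outcome in another:

     void
     kf_foo::impl_call_post (const call_details &cd) const
     {
       // failure state: foo () returned -1, kept unmergeable so that
       // state merging doesn't fold it into the success state
       cd.get_model ()->update_for_int_cst_return (cd, -1, true);

       // success state (in the other bifurcated program_state):
       //   cd.get_model ()->update_for_nonzero_return (cd);
     }
*/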

/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
			    region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
	{
	  return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
	  /* TODO: we might want to also capture the constraint
	     when recording the diagnostic, or note that we're using
	     the upper bound.  */
	}
  return NULL;
}
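
/* Illustrative example (ours) of the kernel idiom being simplified:

     unsigned long n = min_t(unsigned long, user_len, sizeof(buf));
     if (copy_to_user(dst, buf, n))
       return -EFAULT;

   the MIN_EXPR's constant operand, sizeof(buf), is taken as the upper
   bound on the number of bytes copied.  */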

/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
				     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
	= maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}

/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}

/* Get any builtin_known_function for CALL and emit any warning to CTXT
   if not NULL.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no builtin_known_function is found, or it does
   not match the assumption(s).

   Internally calls get_known_function to find a known_function and cast it
   to a builtin_known_function.

   For instance, calloc is a C builtin, defined in gcc/builtins.def
   by the DEF_LIB_BUILTIN macro.  Such builtins are recognized by the
   analyzer by their name, so that even in C++, or if the user redeclares
   them with a mismatching signature, they are still recognized as builtins.

   Cases when a supposed builtin is not flagged as one by the FE:

    The C++ FE does not recognize calloc as a builtin if it has not been
    included from a standard header, but the C FE does.  Hence in C++ if
    CALL comes from a calloc and stdlib is not included,
    gcc/tree.h:fndecl_built_in_p (CALL) would be false.

    In C code, a __SIZE_TYPE__ calloc (__SIZE_TYPE__, __SIZE_TYPE__) user
    declaration has obviously a mismatching signature from the standard, and
    its function_decl tree won't be unified by
    gcc/c-decl.cc:match_builtin_function_types.

   Yet in both cases the analyzer should treat the calls as a builtin calloc
   so that extra attributes unspecified by the standard but added by GCC
   (e.g. sprintf attributes in gcc/builtins.def), useful for the detection of
   dangerous behavior, are indeed processed.

   Therefore for those cases when a "builtin flag" is not added by the FE,
   builtins' kf are derived from builtin_known_function, whose method
   builtin_known_function::builtin_decl returns the builtin's
   function_decl tree as defined in gcc/builtins.def, with all the extra
   attributes.  */

const builtin_known_function *
region_model::get_builtin_kf (const gcall *call,
			      region_model_context *ctxt /* = NULL */) const
{
  region_model *mut_this = const_cast <region_model *> (this);
  tree callee_fndecl = mut_this->get_fndecl_for_call (call, ctxt);
  if (! callee_fndecl)
    return NULL;

  call_details cd (call, mut_this, ctxt);
  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    return kf->dyn_cast_builtin_kf ();

  return NULL;
}
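
/* Illustrative example (ours): in C++, with no standard header included,

     extern "C" void *calloc (__SIZE_TYPE__, __SIZE_TYPE__);
     void *p = calloc (n, sizeof (int));

   fndecl_built_in_p is false for this call, yet get_builtin_kf still
   resolves it by name to the builtin calloc's known_function.  */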

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is). */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer. */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false; /* No side effects. */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that. */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false; /* No further side effects. */
      }

  if (!callee_fndecl)
    {
      cd.set_any_lhs_with_defaults ();
      return true; /* Unknown side effects. */
    }

  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects. */
    }

  cd.set_any_lhs_with_defaults ();

  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects. */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects. */

  if (!fndecl_has_gimple_body_p (callee_fndecl))
    return true; /* Unknown side effects. */

  return false; /* No side effects. */
}
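
/* For example, a call to an external function declared as
     extern void unknown_fn (void *);
   with no body and no known_function handler has unknown side effects:
   on_call_pre returns true, and handle_unrecognized_call will later
   conservatively clobber everything reachable from the call's arguments
   and from globals ("unknown_fn" being a purely illustrative name). */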

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state. */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))? */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  if (unknown_side_effects)
    {
      handle_unrecognized_call (call, ctxt);
      if (ctxt)
	ctxt->maybe_did_work ();
    }
}
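
/* For example, given a pairing declared as
     void release (void *p);
     void *acquire (void) __attribute__ ((malloc (release)));
   GCC records an internal "*dealloc" attribute on "release", so calls
   to it are handled above via impl_deallocation_call ("acquire" and
   "release" being illustrative names). */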

/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value. */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}

/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic. */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};

/* Check CALL, a call to external function CALLEE_FNDECL, based on
   any __attribute__ ((access, ...)) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute. */

void
region_model::check_function_attr_access (const gcall *call,
					  tree callee_fndecl,
					  region_model_context *ctxt,
					  rdwr_map &rdwr_idx) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument. */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of annotating_context that
	     adds a note about the attr access to any saved diagnostics. */
	  class annotating_ctxt : public annotating_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : annotating_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    void add_annotations () final override
	    {
	      add_note (make_unique<reason_attr_access>
			(m_callee_fndecl, m_access));
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them. */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, nullptr, &my_ctxt);
	  /* We don't use the size arg for now. */
	}
    }
}
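
/* For example, given a declaration such as
     void fill_buf (char *buf, size_t n)
       __attribute__ ((access (write_only, 1, 2)));
   the region pointed to by argument 1 at a call to fill_buf is checked
   for writability, with any resulting diagnostic gaining a note that
   points at the attribute ("fill_buf" being an illustrative name). */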

/* Subroutine of region_model::check_function_attr_null_terminated_string_arg,
   checking one instance of __attribute__((null_terminated_string_arg)). */

void
region_model::
check_one_function_attr_null_terminated_string_arg (const gcall *call,
						    tree callee_fndecl,
						    region_model_context *ctxt,
						    rdwr_map &rdwr_idx,
						    tree attr)
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);
  gcc_assert (attr);

  tree arg = TREE_VALUE (attr);
  if (!arg)
    return;

  /* Convert from 1-based to 0-based index. */
  unsigned int arg_idx = TREE_INT_CST_LOW (TREE_VALUE (arg)) - 1;

  /* If there's also an "access" attribute on the ptr param
     for reading with a size param specified, then that size
     limits the size of the possible read from the pointer. */
  if (const attr_access* access = rdwr_idx.get (arg_idx))
    if ((access->mode == access_read_only
	 || access->mode == access_read_write)
	&& access->sizarg != UINT_MAX)
      {
	call_details cd_checked (call, this, ctxt);
	const svalue *limit_sval
	  = cd_checked.get_arg_svalue (access->sizarg);
	const svalue *ptr_sval
	  = cd_checked.get_arg_svalue (arg_idx);
	/* Try reading all of the bytes expressed by the size param,
	   but without emitting warnings (via a null context). */
	const svalue *limited_sval
	  = read_bytes (deref_rvalue (ptr_sval, NULL_TREE, nullptr),
			NULL_TREE,
			limit_sval,
			nullptr);
	if (limited_sval->get_kind () == SK_POISONED)
	  {
	    /* Reading up to the truncation limit caused issues.
	       Assume that the string is meant to be terminated
	       before then, so perform a *checked* check for the
	       terminator. */
	    check_for_null_terminated_string_arg (cd_checked,
						  arg_idx);
	  }
	else
	  {
	    /* Reading up to the truncation limit seems OK; repeat
	       the read, but with checking enabled. */
	    read_bytes (deref_rvalue (ptr_sval, NULL_TREE, ctxt),
			NULL_TREE,
			limit_sval,
			ctxt);
	  }
	return;
      }

  /* Otherwise, we don't have an access-attribute limiting the read.
     Simulate a read up to the null terminator (if any). */

  call_details cd (call, this, ctxt);
  check_for_null_terminated_string_arg (cd, arg_idx);
}

/* Check CALL, a call to external function CALLEE_FNDECL, for any uses
   of __attribute__ ((null_terminated_string_arg)), complaining
   to CTXT about any issues.

   Use RDWR_IDX for tracking uses of __attribute__ ((access, ...)). */

void
region_model::
check_function_attr_null_terminated_string_arg (const gcall *call,
						tree callee_fndecl,
						region_model_context *ctxt,
						rdwr_map &rdwr_idx)
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  /* A function declaration can specify multiple attribute
     null_terminated_string_arg, each with one argument. */
  for (tree attr = TYPE_ATTRIBUTES (fntype); attr; attr = TREE_CHAIN (attr))
    {
      attr = lookup_attribute ("null_terminated_string_arg", attr);
      if (!attr)
	return;

      check_one_function_attr_null_terminated_string_arg (call, callee_fndecl,
							  ctxt, rdwr_idx,
							  attr);
    }
}
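
/* For example, given
     extern size_t my_strlen (const char *p)
       __attribute__ ((null_terminated_string_arg (1)));
   each call to my_strlen has argument 1 checked for a valid
   null-terminated string, with the simulated read optionally bounded
   by any "access" attribute's size param ("my_strlen" being an
   illustrative name). */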

/* Check CALL, a call to external function CALLEE_FNDECL, for any
   function attributes, complaining to CTXT about any issues. */

void
region_model::check_function_attrs (const gcall *call,
				    tree callee_fndecl,
				    region_model_context *ctxt)
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call. */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  check_function_attr_access (call, callee_fndecl, ctxt, rdwr_idx);
  check_function_attr_null_terminated_string_arg (call, callee_fndecl,
						  ctxt, rdwr_idx);
}

/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them. */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_function_attrs (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability. */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls. */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers. */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available. */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form. */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped. */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously. */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them. */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}

/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked. */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls. */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values. */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable. */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}

/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics. */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  if (lhs && rhs)
    {
      const svalue *sval = get_rvalue (rhs, ctxt);
      const region *ret_reg = get_lvalue (lhs, ctxt);
      set_value (ret_reg, sval, ctxt);
    }
}

/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/siglongjmp. */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region. */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0. */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
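
/* For example, in
     jmp_buf env;
     int i = setjmp (env);
   the buffer region for "env" is bound to a setjmp_svalue recording
   this call and enode, and "i" is set to 0 for this direct return. */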

/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller. */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp". */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called. */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt, false);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state. */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1. */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero. */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return. */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}

/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Get state from OLD_STATE so that all of the phi stmts for a basic block
   are effectively handled simultaneously. */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  const region_model &old_state,
			  hash_set<const svalue *> &svals_changing_meaning,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names. */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
  const region *dst_reg = old_state.get_lvalue (lhs, ctxt);

  const svalue *sval = old_state.get_rvalue (lhs, nullptr);
  if (sval->get_kind () == SK_WIDENING)
    svals_changing_meaning.add (sval);

  set_value (dst_reg, src_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
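
/* For example, at the head of a loop such as
     int i = 0;
     while (i < n)
       i++;
   the IL has a phi along the lines of
     i_1 = PHI <i_5(2), i_8(4)>
   and handle_phi is invoked with RHS being the argument for whichever
   incoming edge is being followed, evaluating it against OLD_STATE. */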

/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT. */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals. */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through. */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || VAR_P (expr)
		    || TREE_CODE (expr) == RESULT_DECL);

	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
	/* obj.field */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}

/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */

static void
assert_compat_types (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    {
#if CHECKING_P
      if (!(useless_type_conversion_p (src_type, dst_type)))
	internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
#endif
    }
}

/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */

bool
compat_types_p (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    if (!(useless_type_conversion_p (src_type, dst_type)))
      return false;
  return true;
}

/* Get the region for PV within this region_model,
   emitting any diagnostics to CTXT. */

const region *
region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const region *result_reg = get_lvalue_1 (pv, ctxt);
  assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
  return result_reg;
}

/* Get the region for EXPR within this region_model (assuming the most
   recent stack frame if it's a local). */

const region *
region_model::get_lvalue (tree expr, region_model_context *ctxt) const
{
  return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT. */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR". */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case VAR_DECL:
      if (DECL_HARD_REGISTER (pv.m_tree))
	{
	  /* If it has a hard register, it doesn't have a memory region
	     and can't be referred to as an lvalue. */
	  return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
	}
      /* Fall through. */
    case PARM_DECL:
    case SSA_NAME:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops. */
    case PLUS_EXPR:
    case MULT_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}

/* Get the value of PV within this region_model,
   emitting any diagnostics to CTXT. */

const svalue *
region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const svalue *result_sval = get_rvalue_1 (pv, ctxt);

  assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));

  result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);

  return result_sval;
}

/* Get the value of EXPR within this region_model (assuming the most
   recent stack frame if it's a local). */

const svalue *
region_model::get_rvalue (tree expr, region_model_context *ctxt) const
{
  return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Return true if this model is on a path with "main" as the entrypoint
   (as opposed to one in which we're merely analyzing a subset of the
   path through the code). */

bool
region_model::called_from_main_p () const
{
  if (!m_current_frame)
    return false;
  /* Determine if the oldest stack frame in this model is for "main". */
  const frame_region *frame0 = get_frame_at_index (0);
  gcc_assert (frame0);
  return id_equal (DECL_NAME (frame0->get_function ().decl), "main");
}

/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function). */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within). */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them. */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl. */
  if (called_from_main_p () || TREE_READONLY (decl))
    return reg->get_initial_value_at_main (m_mgr);

  /* Otherwise, return INIT_VAL(REG). */
  return m_mgr->get_or_create_initial_value (reg);
}
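
/* For example, when analyzing
     static int g;
     int main (void) { return g; }
   the path's entrypoint is "main" and "g" is untouched, so the initial
   value of "g" is taken from its implicit zero-initialization rather
   than being treated as a symbolic INIT_VAL. */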

/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG. */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue. */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  bool check_poisoned = true;
  if (check_region_for_read (reg, ctxt))
    check_poisoned = false;

  /* Special-case: handle var_decls in the constant pool. */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST. */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST. */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = integer_zero_node;
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above). */

  /* Handle globals. */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg, check_poisoned);
}
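
/* For example, the read at a constant offset within a string literal in
     const char *s = "foo";
     char c = s[1];
   is resolved via maybe_get_char_from_string_cst to the constant 'o',
   rather than to a symbolic value. */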

/* Return false if REG does not exist, true if it may do.
   This is for detecting regions within the stack that don't exist anymore
   after frames are popped. */

bool
region_model::region_exists_p (const region *reg) const
{
  /* If within a stack frame, check that the stack frame is live. */
  if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
    {
      /* Check that the current frame is the enclosing frame, or is called
	 by it. */
      for (const frame_region *iter_frame = get_current_frame (); iter_frame;
	   iter_frame = iter_frame->get_calling_frame ())
	if (iter_frame == enclosing_frame)
	  return true;
      return false;
    }

  return true;
}

/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics. */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt,
			    bool add_nonnull_constraint) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null. */
  if (add_nonnull_constraint)
    {
      tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
      const svalue *null_ptr
	= m_mgr->get_or_create_constant_svalue (null_ptr_cst);
      m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
    }

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region. */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL. */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (::make_unique<poisoned_value_diagnostic>
			      (ptr, pkind, nullptr, nullptr));
	      }
	  }
      }
      break;
    }

  return m_mgr->get_symbolic_region (ptr_sval);
}

/* Attempt to get BITS within any value of REG, as TYPE.
   In particular, extract values from compound_svalues for the case
   where there's a concrete binding at BITS.
   Return an unknown svalue if we can't handle the given case.
   Use CTXT to report any warnings associated with reading from REG. */

const svalue *
region_model::get_rvalue_for_bits (tree type,
				   const region *reg,
				   const bit_range &bits,
				   region_model_context *ctxt) const
{
  const svalue *sval = get_store_value (reg, ctxt);
  return m_mgr->get_or_create_bits_within (type, bits, sval);
}

/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory. */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
	warned = ctxt.warn ("write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = ctxt.warn ("write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = ctxt.warn ("write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;
  tree m_decl;
};

/* A subclass of pending_diagnostic for complaining about writes to
   string literals. */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point. */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;
};

/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to. */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL. */
  if (!ctxt)
    return;

  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g. the
	   "this" param is "T* const"). */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
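
/* For example, given
     const int g = 42;
     void test (void) { *(int *)&g = 17; }
   the write targets the decl_region for "g", a const global, and is
   reported via -Wanalyzer-write-to-const; similarly, a write through a
   pointer into a string literal is reported via
   -Wanalyzer-write-to-string-literal. */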

/* Get the capacity of REG in bytes. */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions. */
      return get_capacity (reg->get_parent_region ());
    case RK_STRING:
      {
	/* "Capacity" here means "size". */
	const string_region *string_reg = as_a <const string_region *> (reg);
	tree string_cst = string_reg->get_string_cst ();
	return m_mgr->get_or_create_int_cst (size_type_node,
					     TREE_STRING_LENGTH (string_cst));
      }
      break;
    }

  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}
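
/* For example, for
     char buf[16];
   get_capacity on the decl_region for "buf" yields the constant 16 via
   decl_init_size, whereas for a heap allocation such as malloc (n) the
   capacity comes from the recorded dynamic extents, i.e. "n". */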

/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.
   Return TRUE if an OOB access was detected.
   If SVAL_HINT is non-NULL, use it as a hint in diagnostics
   about the value that would be written to REG. */

bool
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   const svalue *sval_hint,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL. */
  if (!ctxt)
    return false;

  bool oob_access_detected = false;
  check_region_for_taint (reg, dir, ctxt);
  if (!check_region_bounds (reg, dir, sval_hint, ctxt))
    oob_access_detected = true;

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op. */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
  return oob_access_detected;
}

/* If CTXT is non-NULL, use it to warn about any problems writing to REG. */

void
region_model::check_region_for_write (const region *dest_reg,
				      const svalue *sval_hint,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, sval_hint, ctxt);
}

/* If CTXT is non-NULL, use it to warn about any problems reading from REG.
   Returns TRUE if an OOB read was detected. */

bool
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  return check_region_access (src_reg, DIR_READ, NULL, ctxt);
}

/* Concrete subclass for casts of pointers that lead to trailing bytes. */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  dubious_allocation_size (const region *lhs, const region *rhs,
			   const svalue *capacity_sval, tree expr,
			   const gimple *stmt)
  : m_lhs (lhs), m_rhs (rhs),
    m_capacity_sval (capacity_sval), m_expr (expr),
    m_stmt (stmt),
    m_has_allocation_event (false)
  {
    gcc_assert (m_capacity_sval);
  }

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return (m_stmt == other.m_stmt
	    && pending_diagnostic::same_tree_p (m_expr, other.m_expr));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    ctxt.add_cwe (131);

    return ctxt.warn ("allocated buffer size is not a multiple"
		      " of the pointee's size");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before. */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
      }

    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity, loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

  void maybe_add_sarif_properties (sarif_object &result_obj)
    const final override
  {
    sarif_property_bag &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/dubious_allocation_size/"
    props.set (PROPERTY_PREFIX "lhs", m_lhs->to_json ());
    props.set (PROPERTY_PREFIX "rhs", m_rhs->to_json ());
    props.set (PROPERTY_PREFIX "capacity_sval", m_capacity_sval->to_json ());
#undef PROPERTY_PREFIX
  }

private:
  const region *m_lhs;
  const region *m_rhs;
  const svalue *m_capacity_sval;
  const tree m_expr;
  const gimple *m_stmt;
  bool m_has_allocation_event;
};
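
/* For example:
     int32_t *ptr = (int32_t *) malloc (11);
   allocates 11 bytes, which is not a multiple of sizeof (int32_t),
   so the assignment is flagged by -Wanalyzer-allocation-size
   (CWE-131: Incorrect Calculation of Buffer Size). */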

/* Return true if a constant allocation size CST is compatible with
   pointee size POINTEE_SIZE_TREE (i.e. is not dubious). */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree,
			       bool is_struct)
{
  gcc_assert (TREE_CODE (cst) == INTEGER_CST);
  gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);

  unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
  unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);

  if (is_struct)
    return alloc_size == 0 || alloc_size >= pointee_size;
  return alloc_size % pointee_size == 0;
}

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}
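
/* For example, an allocation of 12 bytes is compatible with a 4-byte
   pointee type (12 % 4 == 0) whereas 11 bytes is not; for a struct
   pointee, any size of at least the struct's size is accepted, to
   allow for trailing bytes such as those of flexible array members. */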

/* Checks whether SVAL could be a multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that is not a multiple of SIZE_CST to the RESULT_SET. */

class size_visitor : public visitor
{
public:
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  bool is_dubious_capacity ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    if (CONVERT_EXPR_CODE_P (sval->get_op ())
	&& result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    switch (sval->get_op ())
      {
      case MULT_EXPR:
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
	break;
      case PLUS_EXPR:
      case MINUS_EXPR:
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
	break;
      default:
	break;
      }
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) || result_set.contains (iter))
      result_set.add (sval);
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
      }
    else if (!m_cm->sval_constrained_p (sval))
      {
	result_set.add (sval);
      }
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	check_constant (cst, sval);
  }

private:
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible. */
	break;
      case INTEGER_CST:
	if (!capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;
  const svalue *m_root_sval;
  constraint_manager *m_cm;
  svalue_set result_set; /* Used as a mapping of svalue*->bool. */
};
3374
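/* Illustrative sketch of the visitor (assuming a pointee size of 4 and
   hypothetical user code):

     int32_t *buf = (int32_t *) malloc (n * sizeof (int32_t) + 2);

   The capacity is roughly BINOP(+, BINOP(*, n, 4), 2).  The constant 2
   fails capacity_compatible_with_type for size 4 and is added to
   RESULT_SET; PLUS_EXPR propagates this to the root, so
   is_dubious_capacity () returns true.  */
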
/* Return true if SIZE_CST is a power of 2, and we have
   CAPACITY_SVAL == ((X | (Y - 1) ) + 1), since it is then a multiple
   of SIZE_CST, as used by Linux kernel's round_up macro.  */

static bool
is_round_up (tree size_cst,
	     const svalue *capacity_sval)
{
  if (!integer_pow2p (size_cst))
    return false;
  const binop_svalue *binop_sval = capacity_sval->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (binop_sval->get_op () != PLUS_EXPR)
    return false;
  tree rhs_cst = binop_sval->get_arg1 ()->maybe_get_constant ();
  if (!rhs_cst)
    return false;
  if (!integer_onep (rhs_cst))
    return false;

  /* We have CAPACITY_SVAL == (LHS + 1) for some LHS expression.  */

  const binop_svalue *lhs_binop_sval
    = binop_sval->get_arg0 ()->dyn_cast_binop_svalue ();
  if (!lhs_binop_sval)
    return false;
  if (lhs_binop_sval->get_op () != BIT_IOR_EXPR)
    return false;

  tree inner_rhs_cst = lhs_binop_sval->get_arg1 ()->maybe_get_constant ();
  if (!inner_rhs_cst)
    return false;

  if (wi::to_widest (inner_rhs_cst) + 1 != wi::to_widest (size_cst))
    return false;
  return true;
}

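/* For reference, the Linux kernel's round_up macro is (approximately):

     #define round_up(x, y)  ((((x) - 1) | ((y) - 1)) + 1)

   so for a power-of-2 Y the capacity svalue has the shape
   ((X | (Y - 1)) + 1) matched above, which is always a multiple of Y.  */
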
/* Return true if CAPACITY_SVAL is known to be a multiple of SIZE_CST.  */

static bool
is_multiple_p (tree size_cst,
	       const svalue *capacity_sval)
{
  if (const svalue *sval = capacity_sval->maybe_undo_cast ())
    return is_multiple_p (size_cst, sval);

  if (is_round_up (size_cst, capacity_sval))
    return true;

  return false;
}

/* Return true if we should emit a dubious_allocation_size warning
   on assigning a region of capacity CAPACITY_SVAL bytes to a pointer
   of type with size SIZE_CST, where CM expresses known constraints.  */

static bool
is_dubious_capacity (tree size_cst,
		     const svalue *capacity_sval,
		     constraint_manager *cm)
{
  if (is_multiple_p (size_cst, capacity_sval))
    return false;
  size_visitor v (size_cst, capacity_sval, cm);
  return v.is_dubious_capacity ();
}

/* Return true if a struct or union either uses the inheritance pattern,
   where the first field is a base struct, or the flexible array member
   pattern, where the last field is an array without a specified size.  */

static bool
struct_or_union_with_inheritance_p (tree struc)
{
  tree iter = TYPE_FIELDS (struc);
  if (iter == NULL_TREE)
    return false;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
    return true;

  tree last_field;
  while (iter != NULL_TREE)
    {
      last_field = iter;
      iter = DECL_CHAIN (iter);
    }

  if (last_field != NULL_TREE
      && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
    return true;

  return false;
}

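/* e.g. both of these (hypothetical) types make this predicate true:

     struct derived { struct base b; int extra; };  // "inheritance"
     struct str { size_t len; char data[]; };       // flexible array member

   For such types the allocation size legitimately differs from
   sizeof (type), so the allocation-size check skips them.  */
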
/* Return true if the lhs and rhs of an assignment have different types.  */

static bool
is_any_cast_p (const gimple *stmt)
{
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    return gimple_assign_cast_p (assign)
	   || !pending_diagnostic::same_tree_p (
		  TREE_TYPE (gimple_assign_lhs (assign)),
		  TREE_TYPE (gimple_assign_rhs1 (assign)));
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
				    TREE_TYPE (gimple_call_lhs (call)),
				    gimple_call_return_type (call));
    }

  return false;
}

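/* e.g. for gimple like (a sketch):

     p = malloc (16);   // gcall: lhs and return type are both 'void *'
     ip = p;            // gassign: 'int *' lhs vs 'void *' rhs

   only the second statement counts as a cast here.  */
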
/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on function pointers.  */
  if (TREE_CODE (pointee_type) == FUNCTION_TYPE)
    return;

  /* Bail out early on pointers to structs where we can
     not deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = deref_rvalue (rhs_sval, NULL_TREE, ctxt, false);
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
							     capacity, cst_cap,
							     ctxt->get_stmt ()));
      }
      break;
    default:
      {
	if (!is_struct)
	  {
	    if (is_dubious_capacity (pointee_size_tree,
				     capacity,
				     m_constraints))
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
								   rhs_reg,
								   capacity,
								   expr,
								   ctxt->get_stmt ()));
	      }
	  }
	break;
      }
    }
}

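/* A minimal example (user code, not GCC) that this check warns about
   with -Wanalyzer-allocation-size:

     int32_t *ptr = (int32_t *) malloc (10);  // 10 is not a multiple of 4

   whereas malloc (3 * sizeof (int32_t)) would be accepted.  */
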
/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Setting the value of an empty region is a no-op.  */
  if (lhs_reg->empty_p ())
    return;

  check_region_size (lhs_reg, rhs_sval, ctxt);

  check_region_for_write (lhs_reg, rhs_sval, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : NULL);
}

/* Set the value of the region given by LHS to the value given by RHS.  */

void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}

/* Issue a note specifying that a particular function parameter is expected
   to be a valid null-terminated string.  */

static void
inform_about_expected_null_terminated_string_arg (const call_arg_details &ad)
{
  // TODO: ideally we'd underline the param here
  inform (DECL_SOURCE_LOCATION (ad.m_called_fndecl),
	  "argument %d of %qD must be a pointer to a null-terminated string",
	  ad.m_arg_idx + 1, ad.m_called_fndecl);
}

/* A binding of a specific svalue at a concrete byte range.  */

struct fragment
{
  fragment ()
  : m_byte_range (0, 0), m_sval (nullptr)
  {
  }

  fragment (const byte_range &bytes, const svalue *sval)
  : m_byte_range (bytes), m_sval (sval)
  {
  }

  static int cmp_ptrs (const void *p1, const void *p2)
  {
    const fragment *f1 = (const fragment *)p1;
    const fragment *f2 = (const fragment *)p2;
    return byte_range::cmp (f1->m_byte_range, f2->m_byte_range);
  }

  void
  dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "fragment(");
    m_byte_range.dump_to_pp (pp);
    pp_string (pp, ", sval: ");
    if (m_sval)
      m_sval->dump_to_pp (pp, true);
    else
      pp_string (pp, "nullptr");
    pp_string (pp, ")");
  }

  byte_range m_byte_range;
  const svalue *m_sval;
};

/* Determine if there is a zero terminator somewhere in the
   part of STRING_CST covered by BYTES (where BYTES is relative to the
   start of the constant).

   Return a tristate:
   - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
   the number of bytes that would be read, including the zero byte.
   - false if there definitely isn't a zero byte
   - unknown if we don't know.  */

static tristate
string_cst_has_null_terminator (tree string_cst,
				const byte_range &bytes,
				byte_offset_t *out_bytes_read)
{
  gcc_assert (bytes.m_start_byte_offset >= 0);

  /* If we're beyond the string_cst, reads are unsuccessful.  */
  if (tree cst_size = get_string_cst_size (string_cst))
    if (TREE_CODE (cst_size) == INTEGER_CST)
      if (bytes.m_start_byte_offset >= TREE_INT_CST_LOW (cst_size))
	return tristate::unknown ();

  /* Assume all bytes after TREE_STRING_LENGTH are zero.  This handles
     the case where an array is initialized with a string_cst that isn't
     as long as the array, where the remaining elements are
     empty-initialized and thus zeroed.  */
  if (bytes.m_start_byte_offset >= TREE_STRING_LENGTH (string_cst))
    {
      *out_bytes_read = 1;
      return tristate (true);
    }

  /* Look for the first 0 byte within STRING_CST
     from START_READ_OFFSET onwards.  */
  const byte_offset_t num_bytes_to_search
    = std::min<byte_offset_t> ((TREE_STRING_LENGTH (string_cst)
				- bytes.m_start_byte_offset),
			       bytes.m_size_in_bytes);
  const char *start = (TREE_STRING_POINTER (string_cst)
		       + bytes.m_start_byte_offset.slow ());
  if (num_bytes_to_search >= 0)
    if (const void *p = memchr (start, 0, num_bytes_to_search.slow ()))
      {
	*out_bytes_read = (const char *)p - start + 1;
	return tristate (true);
      }

  *out_bytes_read = bytes.m_size_in_bytes;
  return tristate (false);
}

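/* For example (a sketch, not analyzer code): with

     char buf[8] = "ab";

   the initializer is a STRING_CST of type char[8] whose
   TREE_STRING_LENGTH is 3 ("ab" plus its terminator).  A query for
   bytes 0-7 finds the 0 at offset 2 and reports 3 bytes read; a query
   starting at offset 4 hits the "assume zero beyond TREE_STRING_LENGTH"
   case above and reports 1 byte read.  */
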
static tristate
svalue_byte_range_has_null_terminator (const svalue *sval,
				       const byte_range &bytes,
				       byte_offset_t *out_bytes_read,
				       logger *logger);

/* Determine if there is a zero terminator somewhere in the
   part of SVAL covered by BYTES (where BYTES is relative to the svalue).

   Return a tristate:
   - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
   the number of bytes that would be read, including the zero byte.
   - false if there definitely isn't a zero byte
   - unknown if we don't know.

   Use LOGGER (if non-null) for any logging.  */

static tristate
svalue_byte_range_has_null_terminator_1 (const svalue *sval,
					 const byte_range &bytes,
					 byte_offset_t *out_bytes_read,
					 logger *logger)
{
  if (bytes.m_start_byte_offset == 0
      && sval->all_zeroes_p ())
    {
      /* The initial byte of an all-zeroes SVAL is a zero byte.  */
      *out_bytes_read = 1;
      return tristate (true);
    }

  switch (sval->get_kind ())
    {
    case SK_CONSTANT:
      {
	tree cst
	  = as_a <const constant_svalue *> (sval)->get_constant ();
	switch (TREE_CODE (cst))
	  {
	  case STRING_CST:
	    return string_cst_has_null_terminator (cst, bytes, out_bytes_read);
	  case INTEGER_CST:
	    if (bytes.m_start_byte_offset == 0
		&& integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (cst))))
	      {
		/* Model accesses to the initial byte of a 1-byte
		   INTEGER_CST.  */
		*out_bytes_read = 1;
		if (zerop (cst))
		  return tristate (true);
		else
		  return tristate (false);
	      }
	    /* Treat any other access to an INTEGER_CST as unknown.  */
	    return tristate::TS_UNKNOWN;

	  default:
	    break;
	  }
      }
      break;

    case SK_INITIAL:
      {
	const initial_svalue *initial_sval = (const initial_svalue *)sval;
	const region *reg = initial_sval->get_region ();
	if (const string_region *string_reg = reg->dyn_cast_string_region ())
	  {
	    tree string_cst = string_reg->get_string_cst ();
	    return string_cst_has_null_terminator (string_cst,
						   bytes,
						   out_bytes_read);
	  }
	return tristate::TS_UNKNOWN;
      }
      break;

    case SK_BITS_WITHIN:
      {
	const bits_within_svalue *bits_within_sval
	  = (const bits_within_svalue *)sval;
	byte_range bytes_within_inner (0, 0);
	if (bits_within_sval->get_bits ().as_byte_range (&bytes_within_inner))
	  {
	    /* Consider e.g. looking for null terminator of
	       bytes 2-4 of BITS_WITHIN(bytes 10-15 of inner_sval)

	       This is equivalent to looking within bytes 12-14 of
	       inner_sval.  */
	    const byte_offset_t start_byte_relative_to_inner
	      = (bytes.m_start_byte_offset
		 + bytes_within_inner.m_start_byte_offset);
	    const byte_offset_t next_byte_relative_to_inner
	      = (bytes.get_next_byte_offset ()
		 + bytes_within_inner.m_start_byte_offset);
	    if (next_byte_relative_to_inner > start_byte_relative_to_inner)
	      {
		const byte_range relative_to_inner
		  (start_byte_relative_to_inner,
		   next_byte_relative_to_inner - start_byte_relative_to_inner);
		const svalue *inner_sval
		  = bits_within_sval->get_inner_svalue ();
		return svalue_byte_range_has_null_terminator (inner_sval,
							      relative_to_inner,
							      out_bytes_read,
							      logger);
	      }
	  }
      }
      break;

    default:
      // TODO: it may be possible to handle other cases here.
      break;
    }
  return tristate::TS_UNKNOWN;
}

/* Like svalue_byte_range_has_null_terminator_1, but add logging.  */

static tristate
svalue_byte_range_has_null_terminator (const svalue *sval,
				       const byte_range &bytes,
				       byte_offset_t *out_bytes_read,
				       logger *logger)
{
  LOG_SCOPE (logger);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      bytes.dump_to_pp (pp);
      logger->log_partial (" of sval: ");
      sval->dump_to_pp (pp, true);
      logger->end_log_line ();
    }
  tristate ts
    = svalue_byte_range_has_null_terminator_1 (sval, bytes,
					       out_bytes_read, logger);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      pp_printf (pp, "has null terminator: %s", ts.as_string ());
      if (ts.is_true ())
	{
	  pp_string (pp, "; bytes read: ");
	  pp_wide_int (pp, *out_bytes_read, SIGNED);
	}
      logger->end_log_line ();
    }
  return ts;
}

/* A frozen copy of a single base region's binding_cluster within a store,
   optimized for traversal of the concrete parts in byte order.
   This only captures concrete bindings, and is an implementation detail
   of region_model::scan_for_null_terminator.  */

class iterable_cluster
{
public:
  iterable_cluster (const binding_cluster *cluster)
  {
    if (!cluster)
      return;
    for (auto iter : *cluster)
      {
	const binding_key *key = iter.first;
	const svalue *sval = iter.second;

	if (const concrete_binding *concrete_key
	    = key->dyn_cast_concrete_binding ())
	  {
	    byte_range fragment_bytes (0, 0);
	    if (concrete_key->get_byte_range (&fragment_bytes))
	      m_fragments.safe_push (fragment (fragment_bytes, sval));
	  }
	else
	  m_symbolic_bindings.safe_push (key);
      }
    m_fragments.qsort (fragment::cmp_ptrs);
  }

  bool
  get_fragment_for_byte (byte_offset_t byte, fragment *out_frag) const
  {
    /* TODO: binary search rather than linear.  */
    unsigned iter_idx;
    for (iter_idx = 0; iter_idx < m_fragments.length (); iter_idx++)
      {
	if (m_fragments[iter_idx].m_byte_range.contains_p (byte))
	  {
	    *out_frag = m_fragments[iter_idx];
	    return true;
	  }
      }
    return false;
  }

  bool has_symbolic_bindings_p () const
  {
    return !m_symbolic_bindings.is_empty ();
  }

  void dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "iterable_cluster (fragments: [");
    for (auto const &iter : m_fragments)
      {
	if (&iter != m_fragments.begin ())
	  pp_string (pp, ", ");
	iter.dump_to_pp (pp);
      }
    pp_printf (pp, "], symbolic bindings: [");
    for (auto const &iter : m_symbolic_bindings)
      {
	if (&iter != m_symbolic_bindings.begin ())
	  pp_string (pp, ", ");
	(*iter).dump_to_pp (pp, true);
      }
    pp_string (pp, "])");
  }

private:
  auto_vec<fragment> m_fragments;
  auto_vec<const binding_key *> m_symbolic_bindings;
};

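/* e.g. if the store binds bytes 4-7 of 'buf' and then bytes 0-3, the
   fragments are sorted into [bytes 0-3, bytes 4-7] so that the scan
   below can walk them in byte order; any symbolic keys are set aside
   in m_symbolic_bindings.  */
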
/* Simulate reading the bytes at BYTES from BASE_REG.
   Complain to CTXT about any issues with the read e.g. out-of-bounds.  */

const svalue *
region_model::get_store_bytes (const region *base_reg,
			       const byte_range &bytes,
			       region_model_context *ctxt) const
{
  /* Shortcut reading all of a string_region.  */
  if (bytes.get_start_byte_offset () == 0)
    if (const string_region *string_reg = base_reg->dyn_cast_string_region ())
      if (bytes.m_size_in_bytes
	  == TREE_STRING_LENGTH (string_reg->get_string_cst ()))
	return m_mgr->get_or_create_initial_value (base_reg);

  const svalue *index_sval
    = m_mgr->get_or_create_int_cst (size_type_node,
				    bytes.get_start_byte_offset ());
  const region *offset_reg = m_mgr->get_offset_region (base_reg,
						       NULL_TREE,
						       index_sval);
  const svalue *byte_size_sval
    = m_mgr->get_or_create_int_cst (size_type_node, bytes.m_size_in_bytes);
  const region *read_reg = m_mgr->get_sized_region (offset_reg,
						    NULL_TREE,
						    byte_size_sval);

  /* Simulate reading those bytes from the store.  */
  const svalue *sval = get_store_value (read_reg, ctxt);
  return sval;
}

/* Build a MEM_REF tree for the byte at PTR_EXPR + BYTE_OFFSET,
   for use when describing individual bytes in diagnostics.  */

static tree
get_tree_for_byte_offset (tree ptr_expr, byte_offset_t byte_offset)
{
  gcc_assert (ptr_expr);
  tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
  return fold_build2 (MEM_REF,
		      char_type_node,
		      ptr_expr, wide_int_to_tree (ptype, byte_offset));
}

/* Simulate a series of reads of REG until we find a 0 byte
   (equivalent to calling strlen).

   Complain to CTXT and return NULL if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue for the number of bytes read (strlen + 1),
   and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
   representing the content of REG up to and including the terminator.

   Algorithm
   =========

   Get offset for first byte to read.
   Find the binding (if any) that contains it.
   Find the size in bits of that binding.
   Round to the nearest byte (which way???)
     Or maybe give up if we have a partial binding there.
   Get the svalue from the binding.
   Determine the strlen (if any) of that svalue.
   Does it have a 0-terminator within it?
    If so, we have a partial read up to and including that terminator
     Read those bytes from the store; add to the result in the correct place.
     Finish
    If not, we have a full read of that svalue
     Read those bytes from the store; add to the result in the correct place.
     Update read/write offsets
     Continue
    If unknown:
     Result is unknown
     Finish
*/

const svalue *
region_model::scan_for_null_terminator_1 (const region *reg,
					  tree expr,
					  const svalue **out_sval,
					  region_model_context *ctxt) const
{
  logger *logger = ctxt ? ctxt->get_logger () : nullptr;
  store_manager *store_mgr = m_mgr->get_store_manager ();

  region_offset offset = reg->get_offset (m_mgr);
  if (offset.symbolic_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("offset is symbolic");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  byte_offset_t src_byte_offset;
  if (!offset.get_concrete_byte_offset (&src_byte_offset))
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("can't get concrete byte offset");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  const byte_offset_t initial_src_byte_offset = src_byte_offset;
  byte_offset_t dst_byte_offset = 0;

  const region *base_reg = reg->get_base_region ();

  if (const string_region *str_reg = base_reg->dyn_cast_string_region ())
    {
      tree string_cst = str_reg->get_string_cst ();
      if (const void *p = memchr (TREE_STRING_POINTER (string_cst),
				  0,
				  TREE_STRING_LENGTH (string_cst)))
	{
	  size_t num_bytes_read
	    = (const char *)p - TREE_STRING_POINTER (string_cst) + 1;
	  /* Simulate the read.  */
	  byte_range bytes_to_read (0, num_bytes_read);
	  const svalue *sval = get_store_bytes (reg, bytes_to_read, ctxt);
	  if (out_sval)
	    *out_sval = sval;
	  if (logger)
	    logger->log ("using string_cst");
	  return m_mgr->get_or_create_int_cst (size_type_node,
					       num_bytes_read);
	}
    }

  const binding_cluster *cluster = m_store.get_cluster (base_reg);
  iterable_cluster c (cluster);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      c.dump_to_pp (pp);
      logger->end_log_line ();
    }

  binding_map result;

  while (1)
    {
      fragment f;
      if (c.get_fragment_for_byte (src_byte_offset, &f))
	{
	  if (logger)
	    {
	      logger->start_log_line ();
	      pretty_printer *pp = logger->get_printer ();
	      pp_printf (pp, "src_byte_offset: ");
	      pp_wide_int (pp, src_byte_offset, SIGNED);
	      pp_string (pp, ": ");
	      f.dump_to_pp (pp);
	      logger->end_log_line ();
	    }
	  gcc_assert (f.m_byte_range.contains_p (src_byte_offset));
	  /* src_byte_offset and f.m_byte_range are both expressed relative to
	     the base region.
	     Convert to a byte_range relative to the svalue.  */
	  const byte_range bytes_relative_to_svalue
	    (src_byte_offset - f.m_byte_range.get_start_byte_offset (),
	     f.m_byte_range.get_next_byte_offset () - src_byte_offset);
	  byte_offset_t fragment_bytes_read;
	  tristate is_terminated
	    = svalue_byte_range_has_null_terminator (f.m_sval,
						     bytes_relative_to_svalue,
						     &fragment_bytes_read,
						     logger);
	  if (is_terminated.is_unknown ())
	    {
	      if (out_sval)
		*out_sval = get_store_value (reg, nullptr);
	      return m_mgr->get_or_create_unknown_svalue (size_type_node);
	    }

	  /* Simulate reading those bytes from the store.  */
	  byte_range bytes_to_read (src_byte_offset, fragment_bytes_read);
	  const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
	  check_for_poison (sval, expr, nullptr, ctxt);

	  if (out_sval)
	    {
	      byte_range bytes_to_write (dst_byte_offset, fragment_bytes_read);
	      const binding_key *key
		= store_mgr->get_concrete_binding (bytes_to_write);
	      result.put (key, sval);
	    }

	  src_byte_offset += fragment_bytes_read;
	  dst_byte_offset += fragment_bytes_read;

	  if (is_terminated.is_true ())
	    {
	      if (out_sval)
		*out_sval = m_mgr->get_or_create_compound_svalue (NULL_TREE,
								  result);
	      if (logger)
		logger->log ("got terminator");
	      return m_mgr->get_or_create_int_cst (size_type_node,
						   dst_byte_offset);
	    }
	}
      else
	break;
    }

  /* No binding for this base_region, or no binding at src_byte_offset
     (or a symbolic binding).  */

  if (c.has_symbolic_bindings_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      if (logger)
	logger->log ("got symbolic binding");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }

  /* TODO: the various special-cases seen in
     region_model::get_store_value.  */

  /* Simulate reading from this byte, then give up.  */
  byte_range bytes_to_read (src_byte_offset, 1);
  const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
  tree byte_expr
    = (expr
       ? get_tree_for_byte_offset (expr,
				   src_byte_offset - initial_src_byte_offset)
       : NULL_TREE);
  check_for_poison (sval, byte_expr, nullptr, ctxt);
  if (base_reg->can_have_initial_svalue_p ())
    {
      if (out_sval)
	*out_sval = get_store_value (reg, nullptr);
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  else
    return nullptr;
}

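/* Illustrative walk-through (hypothetical user code):

     char buf[16];
     memcpy (buf, "abc", 4);
     n = strlen (buf);

   Here the cluster for 'buf' has one concrete fragment covering bytes
   0-3; the loop above finds the terminator within that fragment, so
   the function returns 4 (bytes read), from which the caller derives
   a strlen of 3.  */
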
/* Like region_model::scan_for_null_terminator_1, but add logging.  */

const svalue *
region_model::scan_for_null_terminator (const region *reg,
					tree expr,
					const svalue **out_sval,
					region_model_context *ctxt) const
{
  logger *logger = ctxt ? ctxt->get_logger () : nullptr;
  LOG_SCOPE (logger);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      logger->log_partial ("region: ");
      reg->dump_to_pp (pp, true);
      logger->end_log_line ();
    }
  const svalue *sval = scan_for_null_terminator_1 (reg, expr, out_sval, ctxt);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      logger->log_partial ("length result: ");
      if (sval)
	sval->dump_to_pp (pp, true);
      else
	pp_printf (pp, "NULL");
      logger->end_log_line ();
      if (out_sval)
	{
	  logger->start_log_line ();
	  logger->log_partial ("content result: ");
	  if (*out_sval)
	    (*out_sval)->dump_to_pp (pp, true);
	  else
	    pp_printf (pp, "NULL");
	  logger->end_log_line ();
	}
    }
  return sval;
}

/* Check that argument ARG_IDX (0-based) to the call described by CD
   is a pointer to a valid null-terminated string.

   Simulate scanning through the buffer, reading until we find a 0 byte
   (equivalent to calling strlen).

   Complain and return NULL if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue for strlen of the buffer (*not* including
   the null terminator).

   TODO: we should also complain if:
   - the pointer is NULL (or could be).  */

const svalue *
region_model::check_for_null_terminated_string_arg (const call_details &cd,
						    unsigned arg_idx) const
{
  return check_for_null_terminated_string_arg (cd,
					       arg_idx,
					       false, /* include_terminator */
					       nullptr); // out_sval
}


/* Check that argument ARG_IDX (0-based) to the call described by CD
   is a pointer to a valid null-terminated string.

   Simulate scanning through the buffer, reading until we find a 0 byte
   (equivalent to calling strlen).

   Complain and return NULL if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue.  This will be the number of bytes read
   (including the null terminator) if INCLUDE_TERMINATOR is true, or strlen
   of the buffer (not including the null terminator) if it is false.

   Also, when returning an svalue, if OUT_SVAL is non-NULL, write to
   *OUT_SVAL with an svalue representing the content of the buffer up to
   and including the terminator.

   TODO: we should also complain if:
   - the pointer is NULL (or could be).  */

const svalue *
region_model::check_for_null_terminated_string_arg (const call_details &cd,
						    unsigned arg_idx,
						    bool include_terminator,
						    const svalue **out_sval) const
{
  class null_terminator_check_event : public custom_event
  {
  public:
    null_terminator_check_event (const event_loc_info &loc_info,
				 const call_arg_details &arg_details)
    : custom_event (loc_info),
      m_arg_details (arg_details)
    {
    }

    label_text get_desc (bool can_colorize) const final override
    {
      if (m_arg_details.m_arg_expr)
	return make_label_text (can_colorize,
				"while looking for null terminator"
				" for argument %i (%qE) of %qD...",
				m_arg_details.m_arg_idx + 1,
				m_arg_details.m_arg_expr,
				m_arg_details.m_called_fndecl);
      else
	return make_label_text (can_colorize,
				"while looking for null terminator"
				" for argument %i of %qD...",
				m_arg_details.m_arg_idx + 1,
				m_arg_details.m_called_fndecl);
    }

  private:
    const call_arg_details m_arg_details;
  };

  class null_terminator_check_decl_note
    : public pending_note_subclass<null_terminator_check_decl_note>
  {
  public:
    null_terminator_check_decl_note (const call_arg_details &arg_details)
    : m_arg_details (arg_details)
    {
    }

    const char *get_kind () const final override
    {
      return "null_terminator_check_decl_note";
    }

    void emit () const final override
    {
      inform_about_expected_null_terminated_string_arg (m_arg_details);
    }

    bool operator== (const null_terminator_check_decl_note &other) const
    {
      return m_arg_details == other.m_arg_details;
    }

  private:
    const call_arg_details m_arg_details;
  };

  /* Subclass of decorated_region_model_context that
     adds the above event and note to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
		     unsigned arg_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_arg_idx (arg_idx)
    {
    }
    void add_annotations () final override
    {
      call_arg_details arg_details (m_cd, m_arg_idx);
      event_loc_info loc_info (m_cd.get_location (),
			       m_cd.get_model ()->get_current_function ()->decl,
			       m_cd.get_model ()->get_stack_depth ());

      add_event (make_unique<null_terminator_check_event> (loc_info,
							   arg_details));
      add_note (make_unique <null_terminator_check_decl_note> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_arg_idx;
  };

  /* Use this ctxt below so that any diagnostics that get added
     get annotated.  */
  annotating_ctxt my_ctxt (cd, arg_idx);

  const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
  const region *buf_reg
    = deref_rvalue (arg_sval, cd.get_arg_tree (arg_idx), &my_ctxt);

  if (const svalue *num_bytes_read_sval
      = scan_for_null_terminator (buf_reg,
				  cd.get_arg_tree (arg_idx),
				  out_sval,
				  &my_ctxt))
    {
      if (include_terminator)
	return num_bytes_read_sval;
      else
	{
	  /* strlen is (bytes_read - 1).  */
	  const svalue *one = m_mgr->get_or_create_int_cst (size_type_node, 1);
	  return m_mgr->get_or_create_binop (size_type_node,
					     MINUS_EXPR,
					     num_bytes_read_sval,
					     one);
	}
    }
  else
    return nullptr;
}

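/* Sketch of typical use from a known-function handler (hypothetical;
   cf. the string handlers in kf.cc):

     region_model *model = cd.get_model ();
     if (const svalue *strlen_sval
	   = model->check_for_null_terminated_string_arg (cd, 0))
       cd.maybe_set_lhs (strlen_sval);

   i.e. simulate strlen of argument 0 and, when the buffer is a valid
   string, bind the result to the call's return value.  */
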
/* Remove all bindings overlapping REG within the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}

/* Remove any bindings for REG within the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}

/* Fill REG with SVAL.
   Use CTXT to report any warnings associated with the write
   (e.g. out-of-bounds).  */

void
region_model::fill_region (const region *reg,
			   const svalue *sval,
			   region_model_context *ctxt)
{
  check_region_for_write (reg, nullptr, ctxt);
  m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
}

/* Zero-fill REG.
   Use CTXT to report any warnings associated with the write
   (e.g. out-of-bounds).  */

void
region_model::zero_fill_region (const region *reg,
				region_model_context *ctxt)
{
  check_region_for_write (reg, nullptr, ctxt);
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}

/* Copy NUM_BYTES_SVAL of SVAL to DEST_REG.
   Use CTXT to report any warnings associated with the copy
   (e.g. out-of-bounds writes).  */

void
region_model::write_bytes (const region *dest_reg,
			   const svalue *num_bytes_sval,
			   const svalue *sval,
			   region_model_context *ctxt)
{
  const region *sized_dest_reg
    = m_mgr->get_sized_region (dest_reg, NULL_TREE, num_bytes_sval);
  set_value (sized_dest_reg, sval, ctxt);
}

/* Read NUM_BYTES_SVAL from SRC_REG.
   Use CTXT to report any warnings associated with the copy
   (e.g. out-of-bounds reads, copying of uninitialized values, etc).  */

const svalue *
region_model::read_bytes (const region *src_reg,
			  tree src_ptr_expr,
			  const svalue *num_bytes_sval,
			  region_model_context *ctxt) const
{
  if (num_bytes_sval->get_kind () == SK_UNKNOWN)
    return m_mgr->get_or_create_unknown_svalue (NULL_TREE);
  const region *sized_src_reg
    = m_mgr->get_sized_region (src_reg, NULL_TREE, num_bytes_sval);
  const svalue *src_contents_sval = get_store_value (sized_src_reg, ctxt);
  check_for_poison (src_contents_sval, src_ptr_expr,
		    sized_src_reg, ctxt);
  return src_contents_sval;
}

/* Copy NUM_BYTES_SVAL bytes from SRC_REG to DEST_REG.
   Use CTXT to report any warnings associated with the copy
   (e.g. out-of-bounds reads/writes, copying of uninitialized values,
   etc).  */

void
region_model::copy_bytes (const region *dest_reg,
			  const region *src_reg,
			  tree src_ptr_expr,
			  const svalue *num_bytes_sval,
			  region_model_context *ctxt)
{
  const svalue *data_sval
    = read_bytes (src_reg, src_ptr_expr, num_bytes_sval, ctxt);
  write_bytes (dest_reg, num_bytes_sval, data_sval, ctxt);
}

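/* e.g. a memcpy-like call can be modeled via these helpers (a sketch,
   assuming the usual call_details accessors; not the actual built-in
   handler):

     const svalue *num_sval = cd.get_arg_svalue (2);
     const region *dest_reg
       = model->deref_rvalue (cd.get_arg_svalue (0), cd.get_arg_tree (0),
			      ctxt);
     const region *src_reg
       = model->deref_rvalue (cd.get_arg_svalue (1), cd.get_arg_tree (1),
			      ctxt);
     model->copy_bytes (dest_reg, src_reg, cd.get_arg_tree (1),
			num_sval, ctxt);

   so bounds checks and uninitialized-read checks happen in one place.  */
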
/* Mark REG as having unknown content.  */

void
region_model::mark_region_as_unknown (const region *reg,
				      uncertainty_t *uncertainty)
{
  svalue_set maybe_live_values;
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
				  uncertainty, &maybe_live_values);
  m_store.on_maybe_live_values (maybe_live_values);
}

4500
808f4dfe 4501/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
4502 this model. */
4503
4504tristate
808f4dfe
DM
4505region_model::eval_condition (const svalue *lhs,
4506 enum tree_code op,
4507 const svalue *rhs) const
757bf1df 4508{
757bf1df
DM
4509 gcc_assert (lhs);
4510 gcc_assert (rhs);
4511
808f4dfe
DM
4512 /* For now, make no attempt to capture constraints on floating-point
4513 values. */
4514 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
4515 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
4516 return tristate::unknown ();
4517
9bbcee45
DM
4518 /* See what we know based on the values. */
4519
808f4dfe
DM
4520 /* Unwrap any unmergeable values. */
4521 lhs = lhs->unwrap_any_unmergeable ();
4522 rhs = rhs->unwrap_any_unmergeable ();
4523
4524 if (lhs == rhs)
757bf1df 4525 {
808f4dfe
DM
4526 /* If we have the same svalue, then we have equality
4527 (apart from NaN-handling).
4528 TODO: should this definitely be the case for poisoned values? */
4529 /* Poisoned and unknown values are "unknowable". */
4530 if (lhs->get_kind () == SK_POISONED
4531 || lhs->get_kind () == SK_UNKNOWN)
4532 return tristate::TS_UNKNOWN;
e978955d 4533
808f4dfe 4534 switch (op)
757bf1df 4535 {
808f4dfe
DM
4536 case EQ_EXPR:
4537 case GE_EXPR:
4538 case LE_EXPR:
4539 return tristate::TS_TRUE;
07c86323 4540
808f4dfe
DM
4541 case NE_EXPR:
4542 case GT_EXPR:
4543 case LT_EXPR:
4544 return tristate::TS_FALSE;
4545
4546 default:
4547 /* For other ops, use the logic below. */
4548 break;
757bf1df 4549 }
808f4dfe 4550 }
757bf1df 4551
808f4dfe
DM
4552 /* If we have a pair of region_svalues, compare them. */
4553 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4554 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4555 {
4556 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
4557 if (res.is_known ())
4558 return res;
4559 /* Otherwise, only known through constraints. */
4560 }
757bf1df 4561
808f4dfe 4562 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
18faaeb3
DM
4563 {
4564 /* If we have a pair of constants, compare them. */
4565 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4566 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
4567 else
4568 {
4569 /* When we have one constant, put it on the RHS. */
4570 std::swap (lhs, rhs);
4571 op = swap_tree_comparison (op);
4572 }
4573 }
4574 gcc_assert (lhs->get_kind () != SK_CONSTANT);
757bf1df 4575
e82e0f14
DM
4576 /* Handle comparison against zero. */
4577 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
4578 if (zerop (cst_rhs->get_constant ()))
4579 {
4580 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
4581 {
4582 /* A region_svalue is a non-NULL pointer, except in certain
4583 special cases (see the comment for region::non_null_p). */
4584 const region *pointee = ptr->get_pointee ();
4585 if (pointee->non_null_p ())
4586 {
4587 switch (op)
4588 {
4589 default:
4590 gcc_unreachable ();
4591
4592 case EQ_EXPR:
4593 case GE_EXPR:
4594 case LE_EXPR:
4595 return tristate::TS_FALSE;
4596
4597 case NE_EXPR:
4598 case GT_EXPR:
4599 case LT_EXPR:
4600 return tristate::TS_TRUE;
4601 }
4602 }
4603 }
4604 else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
4605 {
4606 /* Treat offsets from a non-NULL pointer as being non-NULL. This
4607 isn't strictly true, in that eventually ptr++ will wrap
4608 around and be NULL, but it won't occur in practise and thus
4609 can be used to suppress effectively false positives that we
4610 shouldn't warn for. */
4611 if (binop->get_op () == POINTER_PLUS_EXPR)
4612 {
9bbcee45 4613 tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
e82e0f14
DM
4614 if (lhs_ts.is_known ())
4615 return lhs_ts;
4616 }
4617 }
0b737090
DM
4618 else if (const unaryop_svalue *unaryop
4619 = lhs->dyn_cast_unaryop_svalue ())
4620 {
4621 if (unaryop->get_op () == NEGATE_EXPR)
4622 {
4623 /* e.g. "-X <= 0" is equivalent to X >= 0". */
4624 tristate lhs_ts = eval_condition (unaryop->get_arg (),
4625 swap_tree_comparison (op),
4626 rhs);
4627 if (lhs_ts.is_known ())
4628 return lhs_ts;
4629 }
4630 }
e82e0f14 4631 }
808f4dfe
DM
4632
4633 /* Handle rejection of equality for comparisons of the initial values of
4634 "external" values (such as params) with the address of locals. */
4635 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
4636 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
4637 {
4638 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
4639 if (res.is_known ())
4640 return res;
4641 }
4642 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
4643 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
4644 {
4645 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
4646 if (res.is_known ())
4647 return res;
4648 }
4649
4650 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
4651 if (tree rhs_cst = rhs->maybe_get_constant ())
4652 {
4653 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
4654 if (res.is_known ())
4655 return res;
4656 }
4657
7a6564c9 4658 /* Handle comparisons between two svalues with more than one operand. */
9bbcee45 4659 if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
7a6564c9
TL
4660 {
4661 switch (op)
4662 {
4663 default:
4664 break;
4665 case EQ_EXPR:
4666 {
4667 /* TODO: binops can be equal even if they are not structurally
4668 equal in case of commutative operators. */
4669 tristate res = structural_equality (lhs, rhs);
4670 if (res.is_true ())
4671 return res;
4672 }
4673 break;
4674 case LE_EXPR:
4675 {
4676 tristate res = structural_equality (lhs, rhs);
4677 if (res.is_true ())
4678 return res;
4679 }
4680 break;
4681 case GE_EXPR:
4682 {
4683 tristate res = structural_equality (lhs, rhs);
4684 if (res.is_true ())
4685 return res;
4686 res = symbolic_greater_than (binop, rhs);
4687 if (res.is_true ())
4688 return res;
4689 }
4690 break;
4691 case GT_EXPR:
4692 {
4693 tristate res = symbolic_greater_than (binop, rhs);
4694 if (res.is_true ())
4695 return res;
4696 }
4697 break;
4698 }
4699 }
4700
e7b26744 4701 /* Attempt to unwrap cast if there is one, and the types match. */
4702 tree lhs_type = lhs->get_type ();
4703 tree rhs_type = rhs->get_type ();
4704 if (lhs_type && rhs_type)
4705 {
4706 const unaryop_svalue *lhs_un_op = dyn_cast <const unaryop_svalue *> (lhs);
4707 const unaryop_svalue *rhs_un_op = dyn_cast <const unaryop_svalue *> (rhs);
4708 if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
4709 && rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
4710 && lhs_type == rhs_type)
7a5a4a44
DM
4711 {
4712 tristate res = eval_condition (lhs_un_op->get_arg (),
4713 op,
4714 rhs_un_op->get_arg ());
4715 if (res.is_known ())
4716 return res;
4717 }
e7b26744 4718 else if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
4719 && lhs_type == rhs_type)
7a5a4a44
DM
4720 {
4721 tristate res = eval_condition (lhs_un_op->get_arg (), op, rhs);
4722 if (res.is_known ())
4723 return res;
4724 }
e7b26744 4725 else if (rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
4726 && lhs_type == rhs_type)
7a5a4a44
DM
4727 {
4728 tristate res = eval_condition (lhs, op, rhs_un_op->get_arg ());
4729 if (res.is_known ())
4730 return res;
4731 }
e7b26744 4732 }
4733
9bbcee45
DM
4734 /* Otherwise, try constraints.
4735 Cast to const to ensure we don't change the constraint_manager as we
4736 do this (e.g. by creating equivalence classes). */
4737 const constraint_manager *constraints = m_constraints;
4738 return constraints->eval_condition (lhs, op, rhs);
808f4dfe
DM
4739}
4740
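/* e.g. given a heap-allocated pointer 'p', the logic above evaluates
   "(p + 4) != NULL" by recursing on "p != NULL", and evaluates
   "-x <= 0" as "x >= 0"; anything still unknown falls through to the
   constraint manager.  */
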
/* Subroutine of region_model::eval_condition, for rejecting
   equality of INIT_VAL(PARM) with &LOCAL.  */

tristate
region_model::compare_initial_and_pointer (const initial_svalue *init,
					   const region_svalue *ptr) const
{
  const region *pointee = ptr->get_pointee ();

  /* If we have a pointer to something within a stack frame, it can't be the
     initial value of a param.  */
  if (pointee->maybe_get_frame_region ())
    if (init->initial_value_of_param_p ())
      return tristate::TS_FALSE;

  return tristate::TS_UNKNOWN;
}

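/* e.g. (hypothetical user code):

     void f (int *p)
     {
       int local;
       if (p == &local)   // rejected: INIT_VAL(p) vs &local
	 ...
     }

   On entry to f, p's initial value cannot already point into f's own
   fresh stack frame, so the equality is evaluated as false.  */
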
/* Return true if SVAL is definitely positive.  */

static bool
is_positive_svalue (const svalue *sval)
{
  if (tree cst = sval->maybe_get_constant ())
    return !zerop (cst) && get_range_pos_neg (cst) == 1;
  tree type = sval->get_type ();
  if (!type)
    return false;
  /* Consider a binary operation size_t + int.  The analyzer wraps the int in
     an unaryop_svalue, converting it to a size_t, but in the dynamic execution
     the result is smaller than the first operand.  Thus, we have to check
     whether the argument of the unaryop_svalue is also positive.  */
  if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
    return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
	   && is_positive_svalue (un_op->get_arg ());
  return TYPE_UNSIGNED (type);
}

/* Return true if A is definitely larger than B.

   Limitation: does not account for integer overflows and does not try to
   return false, so it cannot be used negated.  */

tristate
region_model::symbolic_greater_than (const binop_svalue *bin_a,
				     const svalue *b) const
{
  if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
    {
      /* Eliminate the right-hand side of both svalues.  */
      if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	if (bin_a->get_op () == bin_b->get_op ()
	    && eval_condition (bin_a->get_arg1 (),
			       GT_EXPR,
			       bin_b->get_arg1 ()).is_true ()
	    && eval_condition (bin_a->get_arg0 (),
			       GE_EXPR,
			       bin_b->get_arg0 ()).is_true ())
	  return tristate (tristate::TS_TRUE);

      /* Otherwise, try to remove a positive offset or factor from BIN_A.  */
      if (is_positive_svalue (bin_a->get_arg1 ())
	  && eval_condition (bin_a->get_arg0 (),
			     GE_EXPR, b).is_true ())
	return tristate (tristate::TS_TRUE);
    }
  return tristate::unknown ();
}

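/* e.g. for unsigned 'n', the rules above give:

     n * 4 + 8 > n * 4 + 4   (same op; 8 > 4 and n * 4 >= n * 4)
     n + 16    > n           (dropping the positive offset 16)

   subject to the overflow caveat stated above.  */
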
/* Return true if A and B are equal structurally.

   Structural equality means that A and B are equal if the svalues A and B have
   the same nodes at the same positions in the tree and the leaves are equal.
   Equality for conjured_svalues and initial_svalues is determined by comparing
   the pointers while constants are compared by value.  That behavior is useful
   to check for binop_svalues that evaluate to the same concrete value but
   might use one operand with a different type but the same constant value.

   For example,
   binop_svalue (mult_expr,
     initial_svalue (‘size_t’, decl_region (..., 'some_var')),
     constant_svalue (‘size_t’, 4))
   and
   binop_svalue (mult_expr,
     initial_svalue (‘size_t’, decl_region (..., 'some_var')),
     constant_svalue (‘sizetype’, 4))
   are structurally equal.  A concrete C code example, where this occurs, can
   be found in test7 of out-of-bounds-5.c.  */

tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}

/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNEROP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradict existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
    case GE_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case LT_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}

/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  return add_constraint (lhs_sval, op, rhs_sval, ctxt);
}

/* Return true if SVAL cannot be used as part of a constraint when
   checking for an infinite loop: currently just widening svalues,
   whose value varies from one iteration to the next.  */

static bool
unusable_in_infinite_loop_constraint_p (const svalue *sval)
{
  if (sval->get_kind () == SK_WIDENING)
    return true;
  return false;
}


/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (const svalue *lhs,
                              enum tree_code op,
                              const svalue *rhs,
                              region_model_context *ctxt)
{
  const bool checking_for_infinite_loop
    = ctxt ? ctxt->checking_for_infinite_loop_p () : false;

  if (checking_for_infinite_loop)
    {
      if (unusable_in_infinite_loop_constraint_p (lhs)
          || unusable_in_infinite_loop_constraint_p (rhs))
        {
          gcc_assert (ctxt);
          ctxt->on_unusable_in_infinite_loop ();
          return false;
        }
    }

  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  if (checking_for_infinite_loop)
    {
      /* Here, we don't have a definite true/false value, so bail out
         when checking for infinite loops.  */
      gcc_assert (ctxt);
      ctxt->on_unusable_in_infinite_loop ();
      return false;
    }

  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
        unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}
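
/* For instance, on the failure branch of a checked allocation:

     void *p = malloc (n);
     if (p == NULL)
       ...

   adding "p == NULL" drops the dynamic extent recorded for the
   heap-allocated region, since on this path no usable allocation
   exists.  */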

/* As above, but when returning false, if OUT is non-NULL, write a
   new rejected_constraint to *OUT.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
                              region_model_context *ctxt,
                              std::unique_ptr<rejected_constraint> *out)
{
  bool sat = add_constraint (lhs, op, rhs, ctxt);
  if (!sat && out)
    *out = make_unique <rejected_op_constraint> (*this, lhs, op, rhs);
  return sat;
}

/* Determine what is known about the condition "LHS OP RHS" within
   this model.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

tristate
region_model::eval_condition (tree lhs,
                              enum tree_code op,
                              tree rhs,
                              region_model_context *ctxt) const
{
  /* For now, make no attempt to model constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return tristate::unknown ();

  return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
}
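
/* A sketch of the tristate results: if this model already knows
   "i >= 10" for some int i, then (informally, in terms of the
   underlying svalues):

     eval_condition (i, GE_EXPR, 5)   should be true
     eval_condition (i, LT_EXPR, 10)  should be false
     eval_condition (i, EQ_EXPR, 42)  is unknown

   Callers must therefore be prepared to handle all three outcomes.  */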

/* Implementation of region_model::get_representative_path_var.
   Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */

path_var
region_model::get_representative_path_var_1 (const svalue *sval,
                                             svalue_set *visited) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    {
      if (sval->get_kind () == SK_CONSTANT)
        return path_var (sval->maybe_get_constant (), 0);
      else
        return path_var (NULL_TREE, 0);
    }
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
        result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, &pvs);

  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited))
        return path_var (build1 (ADDR_EXPR,
                                 sval->get_type (),
                                 pv.m_tree),
                         pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      if (path_var parent_pv
            = get_representative_path_var (parent_sval, visited))
        if (const field_region *field_reg = subreg->dyn_cast_field_region ())
          return path_var (build3 (COMPONENT_REF,
                                   sval->get_type (),
                                   parent_pv.m_tree,
                                   field_reg->get_field (),
                                   NULL_TREE),
                           parent_pv.m_stack_depth);
    }

  /* Handle binops.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
          = get_representative_path_var (binop_sval->get_arg0 (), visited))
      if (path_var rhs_pv
            = get_representative_path_var (binop_sval->get_arg1 (), visited))
        return path_var (build2 (binop_sval->get_op (),
                                 sval->get_type (),
                                 lhs_pv.m_tree, rhs_pv.m_tree),
                         lhs_pv.m_stack_depth);

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  pvs.qsort (readability_comparator);
  return pvs[0];
}
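
/* For example, if SVAL is a sub_svalue selecting field "len" out of the
   initial value of a parameter "msg", the sub_svalue handling above
   might build the COMPONENT_REF "msg.len", which is what a diagnostic
   would then print.  */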

/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const svalue *sval,
                                           svalue_set *visited) const
{
  if (sval == NULL)
    return path_var (NULL_TREE, 0);

  tree orig_type = sval->get_type ();

  path_var result = get_representative_path_var_1 (sval, visited);

  /* Verify that the result has the same type as SVAL, if any.  */
  if (result.m_tree && orig_type)
    gcc_assert (TREE_TYPE (result.m_tree) == orig_type);

  return result;
}

/* Attempt to return a tree that represents SVAL, or return NULL_TREE.

   Strip off any top-level cast, to avoid messages like
     double-free of '(void *)ptr'
   from analyzer diagnostics.  */

tree
region_model::get_representative_tree (const svalue *sval) const
{
  svalue_set visited;
  tree expr = get_representative_path_var (sval, &visited).m_tree;

  /* Strip off any top-level cast.  */
  if (expr && TREE_CODE (expr) == NOP_EXPR)
    expr = TREE_OPERAND (expr, 0);

  return fixup_tree_for_diagnostic (expr);
}

/* As above, but for a region.  */

tree
region_model::get_representative_tree (const region *reg) const
{
  svalue_set visited;
  tree expr = get_representative_path_var (reg, &visited).m_tree;

  /* Strip off any top-level cast.  */
  if (expr && TREE_CODE (expr) == NOP_EXPR)
    expr = TREE_OPERAND (expr, 0);

  return fixup_tree_for_diagnostic (expr);
}

/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var_1 (const region *reg,
                                             svalue_set *visited) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_THREAD_LOCAL:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
        const function_region *function_reg
          = as_a <const function_region *> (reg);
        return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
        const label_region *label_reg = as_a <const label_region *> (reg);
        return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
        const symbolic_region *symbolic_reg
          = as_a <const symbolic_region *> (reg);
        const svalue *pointer = symbolic_reg->get_pointer ();
        path_var pointer_pv = get_representative_path_var (pointer, visited);
        if (!pointer_pv)
          return path_var (NULL_TREE, 0);
        tree offset = build_int_cst (pointer->get_type (), 0);
        return path_var (build2 (MEM_REF,
                                 reg->get_type (),
                                 pointer_pv.m_tree,
                                 offset),
                         pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
        const decl_region *decl_reg = as_a <const decl_region *> (reg);
        return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
        const field_region *field_reg = as_a <const field_region *> (reg);
        path_var parent_pv
          = get_representative_path_var (reg->get_parent_region (), visited);
        if (!parent_pv)
          return path_var (NULL_TREE, 0);
        return path_var (build3 (COMPONENT_REF,
                                 reg->get_type (),
                                 parent_pv.m_tree,
                                 field_reg->get_field (),
                                 NULL_TREE),
                         parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
        const element_region *element_reg
          = as_a <const element_region *> (reg);
        path_var parent_pv
          = get_representative_path_var (reg->get_parent_region (), visited);
        if (!parent_pv)
          return path_var (NULL_TREE, 0);
        path_var index_pv
          = get_representative_path_var (element_reg->get_index (), visited);
        if (!index_pv)
          return path_var (NULL_TREE, 0);
        return path_var (build4 (ARRAY_REF,
                                 reg->get_type (),
                                 parent_pv.m_tree, index_pv.m_tree,
                                 NULL_TREE, NULL_TREE),
                         parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
        const offset_region *offset_reg
          = as_a <const offset_region *> (reg);
        path_var parent_pv
          = get_representative_path_var (reg->get_parent_region (), visited);
        if (!parent_pv)
          return path_var (NULL_TREE, 0);
        path_var offset_pv
          = get_representative_path_var (offset_reg->get_byte_offset (),
                                         visited);
        if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
          return path_var (NULL_TREE, 0);
        tree addr_parent = build1 (ADDR_EXPR,
                                   build_pointer_type (reg->get_type ()),
                                   parent_pv.m_tree);
        tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode,
                                                  true);
        return path_var (build2 (MEM_REF, reg->get_type (), addr_parent,
                                 fold_convert (ptype, offset_pv.m_tree)),
                         parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
        path_var parent_pv
          = get_representative_path_var (reg->get_parent_region (), visited);
        if (!parent_pv)
          return path_var (NULL_TREE, 0);
        return path_var (build1 (NOP_EXPR,
                                 reg->get_type (),
                                 parent_pv.m_tree),
                         parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
        const string_region *string_reg = as_a <const string_region *> (reg);
        return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_VAR_ARG:
    case RK_ERRNO:
    case RK_UNKNOWN:
    case RK_PRIVATE:
      return path_var (NULL_TREE, 0);
    }
}

/* Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const region *reg,
                                           svalue_set *visited) const
{
  path_var result = get_representative_path_var_1 (reg, visited);

  /* Verify that the result has the same type as REG, if any.  */
  if (result.m_tree && reg->get_type ())
    gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());

  return result;
}

/* Update this model for any phis in SNODE, assuming we came from
   LAST_CFG_SUPEREDGE.  */

void
region_model::update_for_phis (const supernode *snode,
                               const cfg_superedge *last_cfg_superedge,
                               region_model_context *ctxt)
{
  gcc_assert (last_cfg_superedge);

  /* Copy this state and pass it to handle_phi so that all of the phi stmts
     are effectively handled simultaneously.  */
  const region_model old_state (*this);

  hash_set<const svalue *> svals_changing_meaning;

  for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();

      tree src = last_cfg_superedge->get_phi_arg (phi);
      tree lhs = gimple_phi_result (phi);

      /* Update this model based on the phi and old_state.  */
      handle_phi (phi, lhs, src, old_state, svals_changing_meaning, ctxt);
    }

  for (auto iter : svals_changing_meaning)
    m_constraints->purge_state_involving (iter);
}
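
/* The copy into old_state above matters when phis are mutually
   referential, e.g. for a loop that swaps two values:

     x_5 = PHI <x_3(2), y_4(3)>
     y_6 = PHI <y_4(2), x_3(3)>

   Both phis must be evaluated against the values on entry to the
   supernode (old_state); evaluating the second phi against the
   partially-updated *this would lose the swap.  */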

/* Attempt to update this model for taking EDGE (where the last statement
   was LAST_STMT), returning true if the edge can be taken, false
   otherwise.
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.

   For CFG superedges where LAST_STMT is a conditional or a switch
   statement, attempt to add the relevant conditions for EDGE to this
   model, returning true if they are feasible, or false if they are
   impossible.

   For call superedges, push frame information and store arguments
   into parameters.

   For return superedges, pop frame information and store return
   values into any lhs.

   Rejection of call/return superedges happens elsewhere, in
   program_point::on_edge (i.e. based on program point, rather
   than program state).  */

bool
region_model::maybe_update_for_edge (const superedge &edge,
                                     const gimple *last_stmt,
                                     region_model_context *ctxt,
                                     std::unique_ptr<rejected_constraint> *out)
{
  /* Handle frame updates for interprocedural edges.  */
  switch (edge.m_kind)
    {
    default:
      break;

    case SUPEREDGE_CALL:
      {
        const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
        update_for_call_superedge (*call_edge, ctxt);
      }
      break;

    case SUPEREDGE_RETURN:
      {
        const return_superedge *return_edge
          = as_a <const return_superedge *> (&edge);
        update_for_return_superedge (*return_edge, ctxt);
      }
      break;

    case SUPEREDGE_INTRAPROCEDURAL_CALL:
      /* This is a no-op for call summaries; we should already
         have handled the effect of the call summary at the call stmt.  */
      break;
    }

  if (last_stmt == NULL)
    return true;

  /* Apply any constraints for conditionals/switch/computed-goto statements.  */

  if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
    {
      const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
      return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
    }

  if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
    {
      const switch_cfg_superedge *switch_sedge
        = as_a <const switch_cfg_superedge *> (&edge);
      return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
                                            ctxt, out);
    }

  if (const ggoto *goto_stmt = dyn_cast <const ggoto *> (last_stmt))
    {
      const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
      return apply_constraints_for_ggoto (*cfg_sedge, goto_stmt, ctxt);
    }

  /* Apply any constraints due to an exception being thrown.  */
  if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
    if (cfg_sedge->get_flags () & EDGE_EH)
      return apply_constraints_for_exception (last_stmt, ctxt, out);

  return true;
}

/* Push a new frame_region on to the stack region.
   Populate the frame_region with child regions for the function call's
   parameters, using values from the arguments at the callsite in the
   caller's frame.  */

void
region_model::update_for_gcall (const gcall *call_stmt,
                                region_model_context *ctxt,
                                function *callee)
{
  /* Build a vec of argument svalues, using the current top
     frame for resolving tree expressions.  */
  auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));

  for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
    {
      tree arg = gimple_call_arg (call_stmt, i);
      arg_svals.quick_push (get_rvalue (arg, ctxt));
    }

  if (!callee)
    {
      /* Get the function * from the gcall.  */
      tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
      callee = DECL_STRUCT_FUNCTION (fn_decl);
    }

  gcc_assert (callee);
  push_frame (*callee, &arg_svals, ctxt);
}

/* Pop the top-most frame_region from the stack, and copy the return
   region's values (if any) into the region for the lvalue of the LHS of
   the call (if any).  */

void
region_model::update_for_return_gcall (const gcall *call_stmt,
                                       region_model_context *ctxt)
{
  /* Get the lvalue for the result of the call, passing it to pop_frame,
     so that pop_frame can determine the region with respect to the
     *caller* frame.  */
  tree lhs = gimple_call_lhs (call_stmt);
  pop_frame (lhs, NULL, ctxt);
}

/* Extract calling information from the superedge and update the model
   for the call.  */

void
region_model::update_for_call_superedge (const call_superedge &call_edge,
                                         region_model_context *ctxt)
{
  const gcall *call_stmt = call_edge.get_call_stmt ();
  update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
}

/* Extract calling information from the return superedge and update the
   model for the returning call.  */

void
region_model::update_for_return_superedge (const return_superedge &return_edge,
                                           region_model_context *ctxt)
{
  const gcall *call_stmt = return_edge.get_call_stmt ();
  update_for_return_gcall (call_stmt, ctxt);
}

/* Attempt to use R to replay SUMMARY into this object.
   Return true if it is possible.  */

bool
region_model::replay_call_summary (call_summary_replay &r,
                                   const region_model &summary)
{
  gcc_assert (summary.get_stack_depth () == 1);

  m_store.replay_call_summary (r, summary.m_store);

  if (r.get_ctxt ())
    r.get_ctxt ()->maybe_did_work ();

  if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
    return false;

  for (auto kv : summary.m_dynamic_extents)
    {
      const region *summary_reg = kv.first;
      const region *caller_reg = r.convert_region_from_summary (summary_reg);
      if (!caller_reg)
        continue;
      const svalue *summary_sval = kv.second;
      const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
      if (!caller_sval)
        continue;
      m_dynamic_extents.put (caller_reg, caller_sval);
    }

  return true;
}

/* Given a true or false edge guarded by conditional statement COND_STMT,
   determine appropriate constraints for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::
apply_constraints_for_gcond (const cfg_superedge &sedge,
                             const gcond *cond_stmt,
                             region_model_context *ctxt,
                             std::unique_ptr<rejected_constraint> *out)
{
  ::edge cfg_edge = sedge.get_cfg_edge ();
  gcc_assert (cfg_edge != NULL);
  gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));

  enum tree_code op = gimple_cond_code (cond_stmt);
  tree lhs = gimple_cond_lhs (cond_stmt);
  tree rhs = gimple_cond_rhs (cond_stmt);
  if (cfg_edge->flags & EDGE_FALSE_VALUE)
    op = invert_tree_comparison (op, false /* honor_nans */);
  return add_constraint (lhs, op, rhs, ctxt, out);
}
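
/* For example, for the guard:

     if (idx < len)

   the true edge adds the constraint "idx < len", whereas for the false
   edge invert_tree_comparison turns LT_EXPR into GE_EXPR, so
   "idx >= len" is added instead.  */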

/* Return true iff SWITCH_STMT has a non-default label that contains
   INT_CST.  */

static bool
has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
{
  /* We expect the initial label to be the default; skip it.  */
  gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
  unsigned min_idx = 1;
  unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;

  /* Binary search: try to find the label containing INT_CST.
     This requires the cases to be sorted by CASE_LOW (done by the
     gimplifier).  */
  while (max_idx >= min_idx)
    {
      unsigned case_idx = (min_idx + max_idx) / 2;
      tree label = gimple_switch_label (switch_stmt, case_idx);
      tree low = CASE_LOW (label);
      gcc_assert (low);
      tree high = CASE_HIGH (label);
      if (!high)
        high = low;
      if (tree_int_cst_compare (int_cst, low) < 0)
        {
          /* INT_CST is below the range of this label.  */
          gcc_assert (case_idx > 0);
          max_idx = case_idx - 1;
        }
      else if (tree_int_cst_compare (int_cst, high) > 0)
        {
          /* INT_CST is above the range of this case.  */
          min_idx = case_idx + 1;
        }
      else
        /* This case contains INT_CST.  */
        return true;
    }
  /* Not found.  */
  return false;
}
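
/* A worked example of the binary search, with the case labels sorted
   by CASE_LOW:

     switch (v)
       {
       default: ...          idx 0 (skipped)
       case 1: ...           idx 1
       case 5 ... 10: ...    idx 2 (low 5, high 10)
       case 42: ...          idx 3
       }

   Searching for INT_CST == 7 probes idx 2, finds 5 <= 7 <= 10, and
   returns true; searching for 11 narrows the range to empty and
   returns false.  */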

/* Return true iff SWITCH_STMT (which must be on an enum value)
   has nondefault cases handling all values in the enum.  */

static bool
has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt,
                                            tree type)
{
  gcc_assert (switch_stmt);
  gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);

  for (tree enum_val_iter = TYPE_VALUES (type);
       enum_val_iter;
       enum_val_iter = TREE_CHAIN (enum_val_iter))
    {
      tree enum_val = TREE_VALUE (enum_val_iter);
      gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
      gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
      if (!has_nondefault_case_for_value_p (switch_stmt,
                                            DECL_INITIAL (enum_val)))
        return false;
    }
  return true;
}

/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::
apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
                               const gswitch *switch_stmt,
                               region_model_context *ctxt,
                               std::unique_ptr<rejected_constraint> *out)
{
  tree index = gimple_switch_index (switch_stmt);
  const svalue *index_sval = get_rvalue (index, ctxt);
  bool check_index_type = true;

  /* With -fshort-enums, there may be a type cast.  */
  if (ctxt && index_sval->get_kind () == SK_UNARYOP
      && TREE_CODE (index_sval->get_type ()) == INTEGER_TYPE)
    {
      const unaryop_svalue *unaryop = as_a <const unaryop_svalue *> (index_sval);
      if (unaryop->get_op () == NOP_EXPR
          && is_a <const initial_svalue *> (unaryop->get_arg ()))
        if (const initial_svalue *initvalop = (as_a <const initial_svalue *>
                                               (unaryop->get_arg ())))
          if (initvalop->get_type ()
              && TREE_CODE (initvalop->get_type ()) == ENUMERAL_TYPE)
            {
              index_sval = initvalop;
              check_index_type = false;
            }
    }

  /* If we're switching based on an enum type, assume that the user is only
     working with values from the enum.  Hence if this is an
     implicitly-created "default", assume it doesn't get followed.
     This fixes numerous "uninitialized" false positives where we otherwise
     consider jumping past the initialization cases.  */

  if (/* Don't check during feasibility-checking (when ctxt is NULL).  */
      ctxt
      /* Must be an enum value.  */
      && index_sval->get_type ()
      && (!check_index_type
          || TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE)
      && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
      /* If we have a constant, then we can check it directly.  */
      && index_sval->get_kind () != SK_CONSTANT
      && edge.implicitly_created_default_p ()
      && has_nondefault_cases_for_all_enum_values_p (switch_stmt,
                                                     index_sval->get_type ())
      /* Don't do this if there's a chance that the index is
         attacker-controlled.  */
      && !ctxt->possibly_tainted_p (index_sval))
    {
      if (out)
        *out = make_unique <rejected_default_case> (*this);
      return false;
    }

  bounded_ranges_manager *ranges_mgr = get_range_manager ();
  const bounded_ranges *all_cases_ranges
    = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
  bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
  if (!sat && out)
    *out = make_unique <rejected_ranges_constraint> (*this, index,
                                                     all_cases_ranges);
  if (sat && ctxt && !all_cases_ranges->empty_p ())
    ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
  return sat;
}
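
/* For example, given:

     enum color { RED, GREEN, BLUE };

     int f (enum color c)
     {
       int result;
       switch (c)
         {
         case RED:   result = 1; break;
         case GREEN: result = 2; break;
         case BLUE:  result = 3; break;
         }
       return result;
     }

   every enum value has a nondefault case, so the implicitly-created
   "default" edge is rejected above, avoiding a false positive about
   "result" being used uninitialized.  */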

/* Given an edge reached by GOTO_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).  */

bool
region_model::apply_constraints_for_ggoto (const cfg_superedge &edge,
                                           const ggoto *goto_stmt,
                                           region_model_context *ctxt)
{
  tree dest = gimple_goto_dest (goto_stmt);
  const svalue *dest_sval = get_rvalue (dest, ctxt);

  /* If we know we were jumping to a specific label.  */
  if (tree dst_label = edge.m_dest->get_label ())
    {
      const label_region *dst_label_reg
        = m_mgr->get_region_for_label (dst_label);
      const svalue *dst_label_ptr
        = m_mgr->get_ptr_svalue (ptr_type_node, dst_label_reg);

      if (!add_constraint (dest_sval, EQ_EXPR, dst_label_ptr, ctxt))
        return false;
    }

  return true;
}
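
/* For example, for a computed goto:

     static void *dests[] = { &&handle_a, &&handle_b };
     goto *dests[i];

   the edge to "handle_a:" requires the destination pointer to equal
   &&handle_a; if the model already knows where dests[i] points,
   infeasible label edges can be pruned.  */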

/* Apply any constraints due to an exception being thrown at LAST_STMT.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::
apply_constraints_for_exception (const gimple *last_stmt,
                                 region_model_context *ctxt,
                                 std::unique_ptr<rejected_constraint> *out)
{
  gcc_assert (last_stmt);
  if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
    if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
      if (is_named_call_p (callee_fndecl, "operator new", call, 1)
          || is_named_call_p (callee_fndecl, "operator new []", call, 1))
        {
          /* We have an exception thrown from operator new.
             Add a constraint that the result was NULL, to avoid a false
             leak report due to the result being lost when following
             the EH edge.  */
          if (tree lhs = gimple_call_lhs (call))
            return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
          return true;
        }
  return true;
}

/* For use with push_frame when handling a top-level call within the analysis.
   PARAM has a defined but unknown initial value.
   Anything it points to has escaped, since the calling context "knows"
   the pointer, and thus calls to unknown functions could read/write into
   the region.
   If NONNULL is true, then assume that PARAM must be non-NULL.  */

void
region_model::on_top_level_param (tree param,
                                  bool nonnull,
                                  region_model_context *ctxt)
{
  if (POINTER_TYPE_P (TREE_TYPE (param)))
    {
      const region *param_reg = get_lvalue (param, ctxt);
      const svalue *init_ptr_sval
        = m_mgr->get_or_create_initial_value (param_reg);
      const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
      m_store.mark_as_escaped (pointee_reg);
      if (nonnull)
        {
          const svalue *null_ptr_sval
            = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
          add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
        }
    }
}

/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (const function &fun,
                          const vec<const svalue *> *arg_svals,
                          region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun.decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm), ++idx)
        {
          /* If there's a mismatching declaration, the call stmt might
             not have enough args.  Handle this case by leaving the
             rest of the params as uninitialized.  */
          if (idx >= arg_svals->length ())
            break;
          tree parm_lval = iter_parm;
          if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
            parm_lval = parm_default_ssa;
          const region *parm_reg = get_lvalue (parm_lval, ctxt);
          const svalue *arg_sval = (*arg_svals)[idx];
          set_value (parm_reg, arg_sval, ctxt);
        }

      /* Handle any variadic args.  */
      unsigned va_arg_idx = 0;
      for (; idx < arg_svals->length (); idx++, va_arg_idx++)
        {
          const svalue *arg_sval = (*arg_svals)[idx];
          const region *var_arg_reg
            = m_mgr->get_var_arg_region (m_current_frame,
                                         va_arg_idx);
          set_value (var_arg_reg, arg_sval, ctxt);
        }
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
         have defined but unknown initial values.
         Anything they point to has escaped.  */
      tree fndecl = fun.decl;

      /* Handle "__attribute__((nonnull))".  */
      tree fntype = TREE_TYPE (fndecl);
      bitmap nonnull_args = get_nonnull_args (fntype);

      unsigned parm_idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm))
        {
          bool non_null = (nonnull_args
                           ? (bitmap_empty_p (nonnull_args)
                              || bitmap_bit_p (nonnull_args, parm_idx))
                           : false);
          if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
            on_top_level_param (parm_default_ssa, non_null, ctxt);
          else
            on_top_level_param (iter_parm, non_null, ctxt);
          parm_idx++;
        }

      BITMAP_FREE (nonnull_args);
    }

  return m_current_frame;
}
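
/* Note the convention for get_nonnull_args used above: a bare
   "__attribute__((nonnull))" yields an empty bitmap, meaning every
   pointer parameter is non-NULL, whereas "nonnull (1, 3)" yields a
   bitmap with just those (zero-based) parameter indices set.  So when
   the analysis starts at:

     __attribute__((nonnull)) void f (char *p, char *q);

   both "p != NULL" and "q != NULL" are added as constraints.  */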

/* Get the function of the top-most frame in this region_model's stack.
   There must be such a frame.  */

const function *
region_model::get_current_function () const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  return &frame->get_function ();
}

/* Pop the topmost frame_region from this region_model's stack.

   If RESULT_LVALUE is non-null, copy any return value from the frame
   into the corresponding region (evaluated with respect to the *caller*
   frame, rather than the called frame).
   If OUT_RESULT is non-null, copy any return value from the frame
   into *OUT_RESULT.

   If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
   This is for use when unwinding frames e.g. due to longjmp, to suppress
   erroneously reporting uninitialized return values.

   Purge the frame region and all its descendent regions.
   Convert any pointers that point into such regions into
   POISON_KIND_POPPED_STACK svalues.  */

void
region_model::pop_frame (tree result_lvalue,
                         const svalue **out_result,
                         region_model_context *ctxt,
                         bool eval_return_svalue)
{
  gcc_assert (m_current_frame);

  const region_model pre_popped_model = *this;
  const frame_region *frame_reg = m_current_frame;

  /* Notify state machines.  */
  if (ctxt)
    ctxt->on_pop_frame (frame_reg);

  /* Evaluate the result, within the callee frame.  */
  tree fndecl = m_current_frame->get_function ().decl;
  tree result = DECL_RESULT (fndecl);
  const svalue *retval = NULL;
  if (result
      && TREE_TYPE (result) != void_type_node
      && eval_return_svalue)
    {
      retval = get_rvalue (result, ctxt);
      if (out_result)
        *out_result = retval;
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  if (result_lvalue && retval)
    {
      gcc_assert (eval_return_svalue);

      /* Compute result_dst_reg using RESULT_LVALUE *after* popping
         the frame, but before poisoning pointers into the old frame.  */
      const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
      set_value (result_dst_reg, retval, ctxt);
    }

  unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
  notify_on_pop_frame (this, &pre_popped_model, retval, ctxt);
}
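
/* For example, when popping the frame of:

     int *f (void)
     {
       int x = 42;
       return &x;
     }

   the returned pointer targets the popped frame, so stored copies of it
   become POISON_KIND_POPPED_STACK svalues; later uses can then be
   diagnosed (e.g. via -Wanalyzer-use-of-pointer-in-stale-stack-frame).  */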

/* Get the number of frames in this region_model's stack.  */

int
region_model::get_stack_depth () const
{
  const frame_region *frame = get_current_frame ();
  if (frame)
    return frame->get_stack_depth ();
  else
    return 0;
}

/* Get the frame_region with the given index within the stack.
   The frame_region must exist.  */

const frame_region *
region_model::get_frame_at_index (int index) const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  gcc_assert (index >= 0);
  gcc_assert (index <= frame->get_index ());
  while (index != frame->get_index ())
    {
      frame = frame->get_calling_frame ();
      gcc_assert (frame);
    }
  return frame;
}

/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
                                             enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
        base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
        unset_dynamic_extents (iter_reg);
    }
}

/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.  */

struct bad_pointer_finder
{
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
                      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
        const region *ptr_dst = ptr_sval->get_pointee ();
        /* Poison ptrs to descendents of REG, but not to REG itself,
           otherwise double-free detection doesn't work (since sm-state
           for "free" is stored on the original ptr svalue).  */
        if (ptr_dst->descendent_of_p (m_reg)
            && ptr_dst != m_reg)
          {
            sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
                                                         sval->get_type ());
            ++m_count;
          }
      }
  }

  const region *m_reg;
  enum poison_kind m_pkind;
  region_model_manager *const m_mgr;
  int m_count;
};

/* Find any pointers to REG or its descendents; convert them to
   poisoned values of kind PKIND.
   Return the number of pointers that were poisoned.  */

int
region_model::poison_any_pointers_to_descendents (const region *reg,
                                                  enum poison_kind pkind)
{
  bad_pointer_finder bv (reg, pkind, m_mgr);
  m_store.for_each_binding (bv);
  return bv.m_count;
}

/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
                                const program_point &point,
                                region_model *out_model,
                                const extrinsic_state *ext_state,
                                const program_state *state_a,
                                const program_state *state_b) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
                  ext_state, state_a, state_b);

  if (!store::can_merge_p (&m_store, &other_model.m_store,
                           &out_model->m_store, m_mgr->get_store_manager (),
                           &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
                                           &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
                             *other_model.m_constraints,
                             out_model->m_constraints);

  for (auto iter : m.m_svals_changing_meaning)
    out_model->m_constraints->purge_state_involving (iter);

  return true;
}

/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
                                   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
        = fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
        {
          tree fn_decl = fn_reg->get_fndecl ();
          cgraph_node *node = cgraph_node::get (fn_decl);
          if (!node)
            return NULL_TREE;
          const cgraph_node *ultimate_node = node->ultimate_alias_target ();
          if (ultimate_node)
            return ultimate_node->decl;
        }
    }

  return NULL_TREE;
}
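
/* For example, for an indirect call:

     void (*fp) (void) = foo;
     ...
     fp ();

   if the model knows that fp points at the function_region for "foo",
   the call resolves to foo's fndecl (following any aliases through the
   cgraph), enabling interprocedural analysis of the call.  */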

/* Would be much simpler to use a lambda here, if it were supported.  */

struct append_regions_cb_data
{
  const region_model *model;
  auto_vec<const decl_region *> *out;
};

/* Populate *OUT with all decl_regions in the current
   frame that have clusters within the store.  */

void
region_model::
get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
{
  append_regions_cb_data data;
  data.model = this;
  data.out = out;
  m_store.for_each_cluster (append_regions_cb, &data);
}

/* Implementation detail of get_regions_for_current_frame.  */

void
region_model::append_regions_cb (const region *base_reg,
                                 append_regions_cb_data *cb_data)
{
  if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
    return;
  if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
    cb_data->out->safe_push (decl_reg);
}

/* Abstract class for diagnostics related to the use of
   floating-point arithmetic where precision is needed.  */

class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};

/* Concrete diagnostic to complain about uses of floating-point arithmetic
   in the size argument of malloc etc.  */

class float_as_size_arg : public imprecise_floating_point_arithmetic
{
public:
  float_as_size_arg (tree arg) : m_arg (arg)
  {}

  const char *get_kind () const final override
  {
    return "float_as_size_arg_diagnostic";
  }

  bool subclass_equal_p (const pending_diagnostic &other) const final override
  {
    return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    bool warned = ctxt.warn ("use of floating-point arithmetic here might"
                             " yield unexpected results");
    if (warned)
      inform (ctxt.get_location (),
              "only use operands of an integer type"
              " inside the size argument");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    if (m_arg)
      return ev.formatted_print ("operand %qE is of type %qT",
                                 m_arg, TREE_TYPE (m_arg));
    return ev.formatted_print ("at least one operand of the size argument is"
                               " of a floating-point type");
  }

private:
  tree m_arg;
};

/* Visitor to find uses of floating-point variables/constants in an svalue.  */

class contains_floating_point_visitor : public visitor
{
public:
  contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
  {
    root_sval->accept (this);
  }

  const svalue *get_svalue_to_report ()
  {
    return m_result;
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    /* At the point the analyzer runs, constant integer operands in a floating
       point expression are already implicitly converted to floating-points.
       Thus, we do prefer to report non-constants such that the diagnostic
       always reports a floating-point operand.  */
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type) && !m_result)
      m_result = sval;
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

private:
  /* Non-null if at least one floating-point operand was found.  */
  const svalue *m_result;
};

/* May complain about uses of floating-point operands in SIZE_IN_BYTES.  */

void
region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
                                             region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  contains_floating_point_visitor v (size_in_bytes);
  if (const svalue *float_sval = v.get_svalue_to_report ())
    {
      tree diag_arg = get_representative_tree (float_sval);
      ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
    }
}
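
/* For example, the size argument in:

     void *p = malloc (n * 4.0);

   involves floating-point arithmetic, so the float_as_size_arg
   diagnostic above fires (under -Wanalyzer-imprecise-fp-arithmetic)
   when the dynamic extent is recorded.  */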

/* Return a region describing a heap-allocated block of memory.
   Use CTXT to complain about tainted sizes.

   Reuse an existing heap_allocated_region if it's not being referenced by
   this region_model; otherwise create a new one.

   Optionally (update_state_machine) transitions the pointer pointing to the
   heap_allocated_region from start to assumed non-null.  */

const region *
region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
                                                   region_model_context *ctxt,
                                                   bool update_state_machine,
                                                   const call_details *cd)
{
  /* Determine which regions are referenced in this region_model, so that
     we can reuse an existing heap_allocated_region if it's not in use on
     this path.  */
  auto_bitmap base_regs_in_use;
  get_referenced_base_regions (base_regs_in_use);

  /* Don't reuse regions that are marked as TOUCHED.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    if ((*iter).second->touched_p ())
      {
        const region *base_reg = (*iter).first;
        bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
      }

  const region *reg
    = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
  if (size_in_bytes)
    if (compat_types_p (size_in_bytes->get_type (), size_type_node))
      set_dynamic_extents (reg, size_in_bytes, ctxt);

  if (update_state_machine && cd)
    {
      const svalue *ptr_sval
        = m_mgr->get_ptr_svalue (cd->get_lhs_type (), reg);
      transition_ptr_sval_non_null (ctxt, ptr_sval);
    }

  return reg;
}

/* Populate OUT_IDS with the set of IDs of those base regions which are
   reachable in this region_model.  */

void
region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
{
  reachable_regions reachable_regs (const_cast<region_model *> (this));
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
                            &reachable_regs);
  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
        if (parent->get_kind () == RK_FRAME)
          reachable_regs.add (base_reg, false);
    }

  bitmap_clear (out_ids);
  for (auto iter_reg : reachable_regs)
    bitmap_set_bit (out_ids, iter_reg->get_id ());
}

/* Return a new region describing a block of memory allocated within the
   current frame.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_alloca (const svalue *size_in_bytes,
                                        region_model_context *ctxt)
{
  const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}

/* Record that the size of REG is SIZE_IN_BYTES.
   Use CTXT to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
                                   const svalue *size_in_bytes,
                                   region_model_context *ctxt)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  if (ctxt)
    {
      check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
                                    ctxt);
      check_dynamic_size_for_floats (size_in_bytes, ctxt);
    }
  m_dynamic_extents.put (reg, size_in_bytes);
}

/* Get the recorded size of REG in bytes, or NULL if no dynamic size was
   recorded.  */

const svalue *
region_model::get_dynamic_extents (const region *reg) const
{
  if (const svalue * const *slot = m_dynamic_extents.get (reg))
    return *slot;
  return NULL;
}

/* Unset any recorded dynamic size of REG.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  m_dynamic_extents.remove (reg);
}
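
/* For example, for:

     void *p = malloc (n);

   the heap-allocated region pointed to by "p" gets a dynamic extent of
   the svalue for "n", which size-sensitive checks (such as
   -Wanalyzer-allocation-size) can later compare against the size of
   the type that "p" is used as.  */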
6533
c81b60b8
DM
6534/* A subclass of pending_diagnostic for complaining about uninitialized data
6535 being copied across a trust boundary to an untrusted output
6536 (e.g. copy_to_user infoleaks in the Linux kernel). */
6537
6538class exposure_through_uninit_copy
6539 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
6540{
6541public:
6542 exposure_through_uninit_copy (const region *src_region,
6543 const region *dest_region,
ffaeb9dc 6544 const svalue *copied_sval)
c81b60b8
DM
6545 : m_src_region (src_region),
6546 m_dest_region (dest_region),
ffaeb9dc 6547 m_copied_sval (copied_sval)
c81b60b8
DM
6548 {
6549 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
6550 || m_copied_sval->get_kind () == SK_COMPOUND);
6551 }
6552
6553 const char *get_kind () const final override
6554 {
6555 return "exposure_through_uninit_copy";
6556 }
6557
6558 bool operator== (const exposure_through_uninit_copy &other) const
6559 {
6560 return (m_src_region == other.m_src_region
6561 && m_dest_region == other.m_dest_region
6562 && m_copied_sval == other.m_copied_sval);
6563 }
6564
6565 int get_controlling_option () const final override
6566 {
6567 return OPT_Wanalyzer_exposure_through_uninit_copy;
6568 }
6569
12b67d1e 6570 bool emit (diagnostic_emission_context &ctxt) final override
c81b60b8 6571 {
c81b60b8 6572 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
12b67d1e 6573 ctxt.add_cwe (200);
c81b60b8
DM
6574 enum memory_space mem_space = get_src_memory_space ();
6575 bool warned;
6576 switch (mem_space)
6577 {
6578 default:
12b67d1e
DM
6579 warned = ctxt.warn ("potential exposure of sensitive information"
6580 " by copying uninitialized data"
6581 " across trust boundary");
c81b60b8
DM
6582 break;
6583 case MEMSPACE_STACK:
12b67d1e
DM
6584 warned = ctxt.warn ("potential exposure of sensitive information"
6585 " by copying uninitialized data from stack"
6586 " across trust boundary");
c81b60b8
DM
6587 break;
6588 case MEMSPACE_HEAP:
12b67d1e
DM
6589 warned = ctxt.warn ("potential exposure of sensitive information"
6590 " by copying uninitialized data from heap"
6591 " across trust boundary");
c81b60b8
DM
6592 break;
6593 }
6594 if (warned)
6595 {
12b67d1e 6596 const location_t loc = ctxt.get_location ();
c81b60b8
DM
6597 inform_number_of_uninit_bits (loc);
6598 complain_about_uninit_ranges (loc);
6599
6600 if (mem_space == MEMSPACE_STACK)
6601 maybe_emit_fixit_hint ();
6602 }
6603 return warned;
6604 }
6605
6606 label_text describe_final_event (const evdesc::final_event &) final override
6607 {
6608 enum memory_space mem_space = get_src_memory_space ();
6609 switch (mem_space)
6610 {
6611 default:
6612 return label_text::borrow ("uninitialized data copied here");
6613
6614 case MEMSPACE_STACK:
6615 return label_text::borrow ("uninitialized data copied from stack here");
6616
6617 case MEMSPACE_HEAP:
6618 return label_text::borrow ("uninitialized data copied from heap here");
6619 }
6620 }
6621
6622 void mark_interesting_stuff (interesting_t *interest) final override
6623 {
6624 if (m_src_region)
6625 interest->add_region_creation (m_src_region);
6626 }
6627
12b67d1e
DM
6628 void
6629 maybe_add_sarif_properties (sarif_object &result_obj) const final override
6630 {
6631 sarif_property_bag &props = result_obj.get_or_create_properties ();
6632#define PROPERTY_PREFIX "gcc/-Wanalyzer-exposure-through-uninit-copy/"
6633 props.set (PROPERTY_PREFIX "src_region", m_src_region->to_json ());
6634 props.set (PROPERTY_PREFIX "dest_region", m_dest_region->to_json ());
6635 props.set (PROPERTY_PREFIX "copied_sval", m_copied_sval->to_json ());
6636#undef PROPERTY_PREFIX
6637 }
6638
6639private:
6640 enum memory_space get_src_memory_space () const
6641 {
6642 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
6643 }
6644
6645 bit_size_t calc_num_uninit_bits () const
6646 {
6647 switch (m_copied_sval->get_kind ())
6648 {
6649 default:
6650 gcc_unreachable ();
6651 break;
6652 case SK_POISONED:
6653 {
6654 const poisoned_svalue *poisoned_sval
6655 = as_a <const poisoned_svalue *> (m_copied_sval);
6656 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
6657
6659 /* Give up if we don't have type information. */
6659 if (m_copied_sval->get_type () == NULL_TREE)
6660 return 0;
6661
6662 bit_size_t size_in_bits;
6663 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
6664 return size_in_bits;
6665
6666 /* Give up if we can't get the size of the type. */
6667 return 0;
6668 }
6669 break;
6670 case SK_COMPOUND:
6671 {
6672 const compound_svalue *compound_sval
6673 = as_a <const compound_svalue *> (m_copied_sval);
6674 bit_size_t result = 0;
6675 /* Find keys for uninit svals. */
6676 for (auto iter : *compound_sval)
6677 {
6678 const svalue *sval = iter.second;
6679 if (const poisoned_svalue *psval
6680 = sval->dyn_cast_poisoned_svalue ())
6681 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6682 {
6683 const binding_key *key = iter.first;
6684 const concrete_binding *ckey
6685 = key->dyn_cast_concrete_binding ();
6686 gcc_assert (ckey);
6687 result += ckey->get_size_in_bits ();
6688 }
6689 }
6690 return result;
6691 }
6692 }
6693 }
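  /* Worked example (a sketch): for a source struct with one int field
     bound to a constant and 4 bytes of trailing data never written, the
     SK_COMPOUND case above finds a single concrete uninit binding and
     returns 4 * BITS_PER_UNIT = 32 bits.  */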
6694
6695 void inform_number_of_uninit_bits (location_t loc) const
6696 {
6697 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
6698 if (num_uninit_bits <= 0)
6699 return;
6700 if (num_uninit_bits % BITS_PER_UNIT == 0)
6701 {
6702 /* Express in bytes. */
6703 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
6704 if (num_uninit_bytes == 1)
6705 inform (loc, "1 byte is uninitialized");
6706 else
6707 inform (loc,
6708 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
6709 }
6710 else
6711 {
6712 /* Express in bits. */
6713 if (num_uninit_bits == 1)
6714 inform (loc, "1 bit is uninitialized");
6715 else
6716 inform (loc,
6717 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
6718 }
6719 }
6720
6721 void complain_about_uninit_ranges (location_t loc) const
6722 {
6723 if (const compound_svalue *compound_sval
6724 = m_copied_sval->dyn_cast_compound_svalue ())
6725 {
6726 /* Find keys for uninit svals. */
6727 auto_vec<const concrete_binding *> uninit_keys;
6728 for (auto iter : *compound_sval)
6729 {
6730 const svalue *sval = iter.second;
6731 if (const poisoned_svalue *psval
6732 = sval->dyn_cast_poisoned_svalue ())
6733 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6734 {
6735 const binding_key *key = iter.first;
6736 const concrete_binding *ckey
6737 = key->dyn_cast_concrete_binding ();
6738 gcc_assert (ckey);
6739 uninit_keys.safe_push (ckey);
6740 }
6741 }
6742 /* Complain about them in sorted order. */
6743 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
6744
6745 std::unique_ptr<record_layout> layout;
6746
6747 tree type = m_copied_sval->get_type ();
6748 if (type && TREE_CODE (type) == RECORD_TYPE)
6749 {
6750 // (std::make_unique is C++14)
6751 layout = std::unique_ptr<record_layout> (new record_layout (type));
6752
6753 if (0)
6754 layout->dump ();
6755 }
6756
6757 unsigned i;
6758 const concrete_binding *ckey;
6759 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
6760 {
6761 bit_offset_t start_bit = ckey->get_start_bit_offset ();
6762 bit_offset_t next_bit = ckey->get_next_bit_offset ();
6763 complain_about_uninit_range (loc, start_bit, next_bit,
6764 layout.get ());
6765 }
6766 }
6767 }
6768
6769 void complain_about_uninit_range (location_t loc,
6770 bit_offset_t start_bit,
6771 bit_offset_t next_bit,
6772 const record_layout *layout) const
6773 {
6774 if (layout)
6775 {
6776 while (start_bit < next_bit)
6777 {
6778 if (const record_layout::item *item
6779 = layout->get_item_at (start_bit))
6780 {
6781 gcc_assert (start_bit >= item->get_start_bit_offset ());
6782 gcc_assert (start_bit < item->get_next_bit_offset ());
6783 if (item->get_start_bit_offset () == start_bit
6784 && item->get_next_bit_offset () <= next_bit)
6785 complain_about_fully_uninit_item (*item);
6786 else
6787 complain_about_partially_uninit_item (*item);
6788 start_bit = item->get_next_bit_offset ();
6789 continue;
6790 }
6791 else
6792 break;
6793 }
6794 }
6795
6796 if (start_bit >= next_bit)
6797 return;
6798
6799 if (start_bit % 8 == 0 && next_bit % 8 == 0)
6800 {
6801 /* Express in bytes. */
6802 byte_offset_t start_byte = start_bit / 8;
6803 byte_offset_t last_byte = (next_bit / 8) - 1;
6804 if (last_byte == start_byte)
6805 inform (loc,
6806 "byte %wu is uninitialized",
6807 start_byte.to_uhwi ());
6808 else
6809 inform (loc,
6810 "bytes %wu - %wu are uninitialized",
6811 start_byte.to_uhwi (),
6812 last_byte.to_uhwi ());
6813 }
6814 else
6815 {
6816 /* Express in bits. */
6817 bit_offset_t last_bit = next_bit - 1;
6818 if (last_bit == start_bit)
6819 inform (loc,
6820 "bit %wu is uninitialized",
6821 start_bit.to_uhwi ());
6822 else
6823 inform (loc,
6824 "bits %wu - %wu are uninitialized",
6825 start_bit.to_uhwi (),
6826 last_bit.to_uhwi ());
6827 }
6828 }
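  /* Worked example: with no layout information, start_bit == 32 and
     next_bit == 64 are byte-aligned, and are reported as
     "bytes 4 - 7 are uninitialized"; start_bit == 33 and next_bit == 35
     would instead be reported as "bits 33 - 34 are uninitialized".  */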
6829
6830 static void
6831 complain_about_fully_uninit_item (const record_layout::item &item)
6832 {
6833 tree field = item.m_field;
6834 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
6835 if (item.m_is_padding)
6836 {
6837 if (num_bits % 8 == 0)
6838 {
6839 /* Express in bytes. */
6840 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6841 if (num_bytes == 1)
6842 inform (DECL_SOURCE_LOCATION (field),
6843 "padding after field %qD is uninitialized (1 byte)",
6844 field);
6845 else
6846 inform (DECL_SOURCE_LOCATION (field),
6847 "padding after field %qD is uninitialized (%wu bytes)",
6848 field, num_bytes.to_uhwi ());
6849 }
6850 else
6851 {
6852 /* Express in bits. */
6853 if (num_bits == 1)
6854 inform (DECL_SOURCE_LOCATION (field),
6855 "padding after field %qD is uninitialized (1 bit)",
6856 field);
6857 else
6858 inform (DECL_SOURCE_LOCATION (field),
6859 "padding after field %qD is uninitialized (%wu bits)",
6860 field, num_bits.to_uhwi ());
6861 }
6862 }
6863 else
6864 {
6865 if (num_bits % 8 == 0)
6866 {
6867 /* Express in bytes. */
6868 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6869 if (num_bytes == 1)
6870 inform (DECL_SOURCE_LOCATION (field),
6871 "field %qD is uninitialized (1 byte)", field);
6872 else
6873 inform (DECL_SOURCE_LOCATION (field),
6874 "field %qD is uninitialized (%wu bytes)",
6875 field, num_bytes.to_uhwi ());
6876 }
6877 else
6878 {
6879 /* Express in bits. */
6880 if (num_bits == 1)
6881 inform (DECL_SOURCE_LOCATION (field),
6882 "field %qD is uninitialized (1 bit)", field);
6883 else
6884 inform (DECL_SOURCE_LOCATION (field),
6885 "field %qD is uninitialized (%wu bits)",
6886 field, num_bits.to_uhwi ());
6887 }
6888 }
6889 }
6890
6891 static void
6892 complain_about_partially_uninit_item (const record_layout::item &item)
6893 {
6894 tree field = item.m_field;
6895 if (item.m_is_padding)
6896 inform (DECL_SOURCE_LOCATION (field),
6897 "padding after field %qD is partially uninitialized",
6898 field);
6899 else
6900 inform (DECL_SOURCE_LOCATION (field),
6901 "field %qD is partially uninitialized",
6902 field);
6903 /* TODO: ideally we'd describe what parts are uninitialized. */
6904 }
6905
6906 void maybe_emit_fixit_hint () const
6907 {
6908 if (tree decl = m_src_region->maybe_get_decl ())
6909 {
6910 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
6911 hint_richloc.add_fixit_insert_after (" = {0}");
6912 inform (&hint_richloc,
6913 "suggest forcing zero-initialization by"
6914 " providing a %<{0}%> initializer");
6915 }
6916 }
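  /* For example, for a stack-based source region declared as:

       struct reply r;

     the fix-it suggests rewriting the declaration as:

       struct reply r = {0};

     forcing zero-initialization of the whole object.  */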
6917
6918private:
6919 const region *m_src_region;
6920 const region *m_dest_region;
6921 const svalue *m_copied_sval;
6922};
6923
6924/* Return true if any part of SVAL is uninitialized. */
6925
6926static bool
6927contains_uninit_p (const svalue *sval)
6928{
6929 switch (sval->get_kind ())
6930 {
6931 default:
6932 return false;
6933 case SK_POISONED:
6934 {
6935 const poisoned_svalue *psval
6936 = as_a <const poisoned_svalue *> (sval);
6937 return psval->get_poison_kind () == POISON_KIND_UNINIT;
6938 }
6939 case SK_COMPOUND:
6940 {
6941 const compound_svalue *compound_sval
6942 = as_a <const compound_svalue *> (sval);
6943
6944 for (auto iter : *compound_sval)
6945 {
6946 const svalue *sval = iter.second;
6947 if (const poisoned_svalue *psval
6948 = sval->dyn_cast_poisoned_svalue ())
6949 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6950 return true;
6951 }
6952
6953 return false;
6954 }
6955 }
6956}
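/* E.g. a compound svalue in which some field is bound to a
   POISON_KIND_UNINIT svalue yields true; a fully-initialized value, or one
   that is poisoned for some other reason (e.g. POISON_KIND_FREED), yields
   false.  */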
6957
6958/* Function for use by plugins when simulating writing data through a
6959 pointer to an "untrusted" region DST_REG (and thus crossing a security
6960 boundary), such as copying data to user space in an OS kernel.
6961
6962 Check that COPIED_SVAL is fully initialized. If not, complain about
6963 an infoleak to CTXT.
6964
6965 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
6966 as to where COPIED_SVAL came from. */
6967
6968void
6969region_model::maybe_complain_about_infoleak (const region *dst_reg,
6970 const svalue *copied_sval,
6971 const region *src_reg,
6972 region_model_context *ctxt)
6973{
6974 /* Check for exposure. */
6975 if (contains_uninit_p (copied_sval))
6976 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
6977 dst_reg,
6978 copied_sval));
6979}
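/* A minimal sketch of how a plugin's known_function might call the above
   when modeling a "copy to user space" style call; "kf_copy_to_user" is
   hypothetical, and bounding the copy by the size argument is elided:

     void
     kf_copy_to_user::impl_call_pre (const call_details &cd) const
     {
       region_model *model = cd.get_model ();
       region_model_context *ctxt = cd.get_ctxt ();
       const region *dst_reg
         = model->deref_rvalue (cd.get_arg_svalue (0),
                                cd.get_arg_tree (0), ctxt);
       const region *src_reg
         = model->deref_rvalue (cd.get_arg_svalue (1),
                                cd.get_arg_tree (1), ctxt);
       const svalue *copied_sval = model->get_store_value (src_reg, ctxt);
       model->maybe_complain_about_infoleak (dst_reg, copied_sval,
                                             src_reg, ctxt);
     }  */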
6980
6981/* Set errno to a positive symbolic int, as if some error has occurred. */
6982
6983void
6984region_model::set_errno (const call_details &cd)
6985{
6986 const region *errno_reg = m_mgr->get_errno_region ();
6987 conjured_purge p (this, cd.get_ctxt ());
6988 const svalue *new_errno_sval
6989 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
6990 cd.get_call_stmt (),
6991 errno_reg, p);
6992 const svalue *zero
6993 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
6994 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
6995 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
6996}
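/* A sketch of typical usage from a known_function that models a call which
   can fail ("kf_something" is hypothetical); on the failure path, the call
   leaves a positive but otherwise unknown value in errno:

     void
     kf_something::impl_call_post (const call_details &cd) const
     {
       cd.get_model ()->set_errno (cd);
     }  */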
6997
6998/* class noop_region_model_context : public region_model_context. */
6999
7000void
7001noop_region_model_context::add_note (std::unique_ptr<pending_note>)
7002{
7003}
7004
7005void
7006noop_region_model_context::add_event (std::unique_ptr<checker_event>)
7007{
7008}
7009
7010void
7011noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
7012{
7013}
7014
7015void
7016noop_region_model_context::terminate_path ()
7017{
7018}
7019
7020/* class region_model_context_decorator : public region_model_context. */
7021
7022void
7023region_model_context_decorator::add_event (std::unique_ptr<checker_event> event)
7024{
7025 if (m_inner)
7026 m_inner->add_event (std::move (event));
7027}
7028
7029/* struct model_merger. */
7030
7031/* Dump a multiline representation of this merger to PP. */
7032
7033void
7034model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
7035{
7036 pp_string (pp, "model A:");
7037 pp_newline (pp);
7038 m_model_a->dump_to_pp (pp, simple, true);
7039 pp_newline (pp);
7040
7041 pp_string (pp, "model B:");
7042 pp_newline (pp);
7043 m_model_b->dump_to_pp (pp, simple, true);
7044 pp_newline (pp);
7045
7046 pp_string (pp, "merged model:");
7047 pp_newline (pp);
7048 m_merged_model->dump_to_pp (pp, simple, true);
7049 pp_newline (pp);
7050}
7051
7052/* Dump a multiline representation of this merger to FILE. */
7053
7054void
7055model_merger::dump (FILE *fp, bool simple) const
7056{
7057 pretty_printer pp;
7058 pp_format_decoder (&pp) = default_tree_printer;
7059 pp_show_color (&pp) = pp_show_color (global_dc->printer);
7060 pp.buffer->stream = fp;
7061 dump_to_pp (&pp, simple);
7062 pp_flush (&pp);
7063}
7064
7065/* Dump a multiline representation of this merger to stderr. */
7066
7067DEBUG_FUNCTION void
7068model_merger::dump (bool simple) const
7069{
7070 dump (stderr, simple);
7071}
7072
7073/* Return true if it's OK to merge SVAL with other svalues. */
7074
7075bool
7076model_merger::mergeable_svalue_p (const svalue *sval) const
7077{
7078 if (m_ext_state)
7079 {
7080 /* Reject merging svalues that have non-purgeable sm-state,
7081 to avoid falsely reporting memory leaks by merging them
7082 with something else. For example, given a local var "p",
7083 reject the merger of a:
7084 store_a mapping "p" to a malloc-ed ptr
7085 with:
7086 store_b mapping "p" to a NULL ptr. */
7087 if (m_state_a)
7088 if (!m_state_a->can_purge_p (*m_ext_state, sval))
7089 return false;
7090 if (m_state_b)
7091 if (!m_state_b->can_purge_p (*m_ext_state, sval))
7092 return false;
7093 }
7094 return true;
7095}
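/* Concretely (a sketch), given a local "p":

     if (flag)
       p = malloc (16);   // model A: "p" has unpurgeable malloc sm-state
     else
       p = NULL;          // model B: "p" is a NULL pointer

   rejecting the merger keeps the two paths separate, rather than merging
   "p" into a single unknown value and then falsely reporting a leak of
   the allocation.  */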
7096
7097/* Mark WIDENING_SVAL as changing meaning during the merge. */
7098
7099void
7100model_merger::on_widening_reuse (const widening_svalue *widening_sval)
7101{
7102 m_svals_changing_meaning.add (widening_sval);
7103}
7104
7105} // namespace ana
7106
7107/* Dump RMODEL fully to stderr (i.e. without summarization). */
7108
7109DEBUG_FUNCTION void
7110debug (const region_model &rmodel)
7111{
7112 rmodel.dump (false);
7113}
7114
7115/* class rejected_op_constraint : public rejected_constraint. */
7116
7117void
7118rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
7119{
7120 region_model m (m_model);
7121 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
7122 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
7123 lhs_sval->dump_to_pp (pp, true);
7124 pp_printf (pp, " %s ", op_symbol_code (m_op));
7125 rhs_sval->dump_to_pp (pp, true);
7126}
7127
7128/* class rejected_default_case : public rejected_constraint. */
7129
7130void
7131rejected_default_case::dump_to_pp (pretty_printer *pp) const
7132{
7133 pp_string (pp, "implicit default for enum");
7134}
7135
7136/* class rejected_ranges_constraint : public rejected_constraint. */
7137
7138void
7139rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
7140{
7141 region_model m (m_model);
7142 const svalue *sval = m.get_rvalue (m_expr, NULL);
7143 sval->dump_to_pp (pp, true);
7144 pp_string (pp, " in ");
7145 m_ranges->dump_to_pp (pp, true);
7146}
7147
7148/* class engine. */
7149
7150/* engine's ctor. */
7151
7152engine::engine (const supergraph *sg, logger *logger)
7153: m_sg (sg), m_mgr (logger)
7154{
7155}
7156
7157/* Dump the managed objects by class to LOGGER, and the per-class totals. */
7158
7159void
7160engine::log_stats (logger *logger) const
7161{
7162 m_mgr.log_stats (logger, true);
7163}
7164
7165namespace ana {
7166
7167#if CHECKING_P
7168
7169namespace selftest {
7170
7171/* Build a constant tree of the given type from STR. */
7172
7173static tree
7174build_real_cst_from_string (tree type, const char *str)
7175{
7176 REAL_VALUE_TYPE real;
7177 real_from_string (&real, str);
7178 return build_real (type, real);
7179}
7180
7181/* Append various "interesting" constants to OUT (e.g. NaN). */
7182
7183static void
7184append_interesting_constants (auto_vec<tree> *out)
7185{
7186 out->safe_push (integer_zero_node);
7187 out->safe_push (build_int_cst (integer_type_node, 42));
7188 out->safe_push (build_int_cst (unsigned_type_node, 0));
7189 out->safe_push (build_int_cst (unsigned_type_node, 42));
7190 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
7191 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
7192 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
7193 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
7194 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
7195 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
7196 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
7197 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
7198}
7199
7200/* Verify that tree_cmp is a well-behaved comparator for qsort, even
7201 if the underlying constants aren't comparable. */
7202
7203static void
7204test_tree_cmp_on_constants ()
7205{
7206 auto_vec<tree> csts;
7207 append_interesting_constants (&csts);
7208
7209 /* Try sorting every triple. */
7210 const unsigned num = csts.length ();
7211 for (unsigned i = 0; i < num; i++)
7212 for (unsigned j = 0; j < num; j++)
7213 for (unsigned k = 0; k < num; k++)
7214 {
7215 auto_vec<tree> v (3);
7216 v.quick_push (csts[i]);
7217 v.quick_push (csts[j]);
7218 v.quick_push (csts[k]);
7219 v.qsort (tree_cmp);
7220 }
7221}
7222
7223/* Implementation detail of the ASSERT_CONDITION_* macros. */
7224
7225void
7226assert_condition (const location &loc,
7227 region_model &model,
7228 const svalue *lhs, tree_code op, const svalue *rhs,
7229 tristate expected)
7230{
7231 tristate actual = model.eval_condition (lhs, op, rhs);
7232 ASSERT_EQ_AT (loc, actual, expected);
7233}
7234
7235/* Implementation detail of the ASSERT_CONDITION_* macros. */
7236
7237void
7238assert_condition (const location &loc,
7239 region_model &model,
7240 tree lhs, tree_code op, tree rhs,
7241 tristate expected)
7242{
7243 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
7244 ASSERT_EQ_AT (loc, actual, expected);
7245}
7246
7247/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
7248
7249static void
7250assert_dump_tree_eq (const location &loc, tree t, const char *expected)
7251{
7252 auto_fix_quotes sentinel;
7253 pretty_printer pp;
7254 pp_format_decoder (&pp) = default_tree_printer;
7255 dump_tree (&pp, t);
7256 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
7257}
7258
7259/* Assert that dump_tree (T) is EXPECTED. */
7260
7261#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
7262 SELFTEST_BEGIN_STMT \
7263 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
7264 SELFTEST_END_STMT
7265
7266/* Implementation detail of ASSERT_DUMP_EQ. */
7267
7268static void
7269assert_dump_eq (const location &loc,
7270 const region_model &model,
7271 bool summarize,
7272 const char *expected)
7273{
7274 auto_fix_quotes sentinel;
7275 pretty_printer pp;
7276 pp_format_decoder (&pp) = default_tree_printer;
7277
7278 model.dump_to_pp (&pp, summarize, true);
7279 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
7280}
7281
7282/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7283
7284#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7285 SELFTEST_BEGIN_STMT \
7286 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7287 SELFTEST_END_STMT
7288
7289/* Smoketest for region_model::dump_to_pp. */
7290
7291static void
7292test_dump ()
7293{
7294 region_model_manager mgr;
7295 region_model model (&mgr);
7296
7297 ASSERT_DUMP_EQ (model, false,
7298 "stack depth: 0\n"
7299 "m_called_unknown_fn: FALSE\n"
7300 "constraint_manager:\n"
7301 " equiv classes:\n"
7302 " constraints:\n");
7303 ASSERT_DUMP_EQ (model, true,
7304 "stack depth: 0\n"
7305 "m_called_unknown_fn: FALSE\n"
7306 "constraint_manager:\n"
7307 " equiv classes:\n"
7308 " constraints:\n");
7309}
7310
7311/* Helper function for selftests. Create a struct or union type named NAME,
7312 with the fields given by the FIELD_DECLS in FIELDS.
7313 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
7314 create a UNION_TYPE. */
7315
7316static tree
7317make_test_compound_type (const char *name, bool is_struct,
7318 const auto_vec<tree> *fields)
7319{
7320 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
7321 TYPE_NAME (t) = get_identifier (name);
7322 TYPE_SIZE (t) = 0;
7323
7324 tree fieldlist = NULL;
7325 int i;
7326 tree field;
7327 FOR_EACH_VEC_ELT (*fields, i, field)
7328 {
7329 gcc_assert (TREE_CODE (field) == FIELD_DECL);
7330 DECL_CONTEXT (field) = t;
7331 fieldlist = chainon (field, fieldlist);
7332 }
7333 fieldlist = nreverse (fieldlist);
7334 TYPE_FIELDS (t) = fieldlist;
7335
7336 layout_type (t);
7337 return t;
7338}
7339
7340/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
7341
7342struct coord_test
7343{
7344 coord_test ()
7345 {
7346 auto_vec<tree> fields;
7347 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
7348 get_identifier ("x"), integer_type_node);
7349 fields.safe_push (m_x_field);
7350 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
7351 get_identifier ("y"), integer_type_node);
7352 fields.safe_push (m_y_field);
7353 m_coord_type = make_test_compound_type ("coord", true, &fields);
7354 }
7355
7356 tree m_x_field;
7357 tree m_y_field;
7358 tree m_coord_type;
7359};
7360
7361/* Verify usage of a struct. */
7362
7363static void
7364test_struct ()
7365{
7366 coord_test ct;
7367
7368 tree c = build_global_decl ("c", ct.m_coord_type);
7369 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7370 c, ct.m_x_field, NULL_TREE);
7371 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7372 c, ct.m_y_field, NULL_TREE);
7373
7374 tree int_17 = build_int_cst (integer_type_node, 17);
7375 tree int_m3 = build_int_cst (integer_type_node, -3);
7376
7377 region_model_manager mgr;
7378 region_model model (&mgr);
7379 model.set_value (c_x, int_17, NULL);
7380 model.set_value (c_y, int_m3, NULL);
7381
7382 /* Verify get_offset for "c.x". */
7383 {
7384 const region *c_x_reg = model.get_lvalue (c_x, NULL);
7385 region_offset offset = c_x_reg->get_offset (&mgr);
7386 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
7387 ASSERT_EQ (offset.get_bit_offset (), 0);
7388 }
7389
7390 /* Verify get_offset for "c.y". */
7391 {
7392 const region *c_y_reg = model.get_lvalue (c_y, NULL);
7393 region_offset offset = c_y_reg->get_offset (&mgr);
7394 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
7395 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
7396 }
7397}
7398
7399/* Verify usage of an array element. */
7400
7401static void
7402test_array_1 ()
7403{
7404 tree tlen = size_int (10);
7405 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
7406
7407 tree a = build_global_decl ("a", arr_type);
7408
7409 region_model_manager mgr;
7410 region_model model (&mgr);
7411 tree int_0 = integer_zero_node;
7412 tree a_0 = build4 (ARRAY_REF, char_type_node,
7413 a, int_0, NULL_TREE, NULL_TREE);
7414 tree char_A = build_int_cst (char_type_node, 'A');
7415 model.set_value (a_0, char_A, NULL);
7416}
7417
7418/* Verify that region_model::get_representative_tree works as expected. */
7419
7420static void
7421test_get_representative_tree ()
7422{
7423 region_model_manager mgr;
7424
7425 /* STRING_CST. */
7426 {
7427 tree string_cst = build_string (4, "foo");
7428 region_model m (&mgr);
7429 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
7430 tree rep = m.get_representative_tree (str_sval);
7431 ASSERT_EQ (rep, string_cst);
7432 }
7433
7434 /* String literal. */
7435 {
7436 tree string_cst_ptr = build_string_literal (4, "foo");
7437 region_model m (&mgr);
7438 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
7439 tree rep = m.get_representative_tree (str_sval);
7440 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
7441 }
7442
7443 /* Value of an element within an array. */
7444 {
7445 tree tlen = size_int (10);
7446 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
7447 tree a = build_global_decl ("a", arr_type);
7448 placeholder_svalue test_sval (mgr.alloc_symbol_id (),
7449 char_type_node, "test value");
7450
7451 /* Value of a[3]. */
7452 {
7453 test_region_model_context ctxt;
7454 region_model model (&mgr);
7455 tree int_3 = build_int_cst (integer_type_node, 3);
7456 tree a_3 = build4 (ARRAY_REF, char_type_node,
7457 a, int_3, NULL_TREE, NULL_TREE);
7458 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
7459 model.set_value (a_3_reg, &test_sval, &ctxt);
7460 tree rep = model.get_representative_tree (&test_sval);
7461 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
7462 }
7463
7464 /* Value of a[0]. */
7465 {
7466 test_region_model_context ctxt;
7467 region_model model (&mgr);
7468 tree idx = integer_zero_node;
7469 tree a_0 = build4 (ARRAY_REF, char_type_node,
7470 a, idx, NULL_TREE, NULL_TREE);
7471 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
7472 model.set_value (a_0_reg, &test_sval, &ctxt);
7473 tree rep = model.get_representative_tree (&test_sval);
7474 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
7475 }
7476 }
7477
7478 /* Value of a field within a struct. */
7479 {
7480 coord_test ct;
7481
7482 tree c = build_global_decl ("c", ct.m_coord_type);
7483 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7484 c, ct.m_x_field, NULL_TREE);
7485 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7486 c, ct.m_y_field, NULL_TREE);
7487
7488 test_region_model_context ctxt;
7489
7490 /* Value of initial field. */
7491 {
7492 region_model m (&mgr);
7493 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
7494 placeholder_svalue test_sval_x (mgr.alloc_symbol_id (),
7495 integer_type_node, "test x val");
7496 m.set_value (c_x_reg, &test_sval_x, &ctxt);
7497 tree rep = m.get_representative_tree (&test_sval_x);
7498 ASSERT_DUMP_TREE_EQ (rep, "c.x");
7499 }
7500
7501 /* Value of non-initial field. */
7502 {
7503 region_model m (&mgr);
7504 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
7505 placeholder_svalue test_sval_y (mgr.alloc_symbol_id (),
7506 integer_type_node, "test y val");
7507 m.set_value (c_y_reg, &test_sval_y, &ctxt);
7508 tree rep = m.get_representative_tree (&test_sval_y);
7509 ASSERT_DUMP_TREE_EQ (rep, "c.y");
7510 }
7511 }
7512}
7513
7514/* Verify that calling region_model::get_rvalue repeatedly on the same
7515 tree constant retrieves the same svalue *. */
7516
7517static void
7518test_unique_constants ()
7519{
7520 tree int_0 = integer_zero_node;
7521 tree int_42 = build_int_cst (integer_type_node, 42);
7522
7523 test_region_model_context ctxt;
7524 region_model_manager mgr;
7525 region_model model (&mgr);
7526 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
7527 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
7528 model.get_rvalue (int_42, &ctxt));
7529 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
7530 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
7531
7532 /* A "(const int)42" will be a different tree from "(int)42"... */
7533 tree const_int_type_node
7534 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
7535 tree const_int_42 = build_int_cst (const_int_type_node, 42);
7536 ASSERT_NE (int_42, const_int_42);
7537 /* It should have a different const_svalue. */
7538 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
7539 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
7540 ASSERT_NE (int_42_sval, const_int_42_sval);
7541 /* But they should compare as equal. */
7542 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
7543 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
7544}
7545
7546/* Verify that each type gets its own singleton unknown_svalue within a
7547 region_model_manager, and that NULL_TREE gets its own singleton. */
7548
7549static void
808f4dfe 7550test_unique_unknowns ()
757bf1df 7551{
7552 region_model_manager mgr;
7553 const svalue *unknown_int
7554 = mgr.get_or_create_unknown_svalue (integer_type_node);
7555 /* Repeated calls with the same type should get the same "unknown"
7556 svalue. */
7557 const svalue *unknown_int_2
7558 = mgr.get_or_create_unknown_svalue (integer_type_node);
7559 ASSERT_EQ (unknown_int, unknown_int_2);
7560
7561 /* Different types (or the NULL type) should have different
7562 unknown_svalues. */
7563 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
7564 ASSERT_NE (unknown_NULL_type, unknown_int);
7565
7566 /* Repeated calls with NULL for the type should get the same "unknown"
7567 svalue. */
7568 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
7569 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
7570}
7571
7572/* Verify that initial_svalues are handled as expected. */
7573
7574static void
7575test_initial_svalue_folding ()
7576{
7577 region_model_manager mgr;
7578 tree x = build_global_decl ("x", integer_type_node);
7579 tree y = build_global_decl ("y", integer_type_node);
7580
7581 test_region_model_context ctxt;
7582 region_model model (&mgr);
7583 const svalue *x_init = model.get_rvalue (x, &ctxt);
7584 const svalue *y_init = model.get_rvalue (y, &ctxt);
7585 ASSERT_NE (x_init, y_init);
7586 const region *x_reg = model.get_lvalue (x, &ctxt);
7587 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
7588
7589}
7590
7591/* Verify that unary ops are folded as expected. */
7592
7593static void
7594test_unaryop_svalue_folding ()
7595{
7596 region_model_manager mgr;
7597 tree x = build_global_decl ("x", integer_type_node);
7598 tree y = build_global_decl ("y", integer_type_node);
7599
7600 test_region_model_context ctxt;
7601 region_model model (&mgr);
7602 const svalue *x_init = model.get_rvalue (x, &ctxt);
7603 const svalue *y_init = model.get_rvalue (y, &ctxt);
7604 const region *x_reg = model.get_lvalue (x, &ctxt);
7605 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
7606
7607 /* "(int)x" -> "x". */
7608 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
7609
7610 /* "(void *)x" -> something other than "x". */
7611 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
7612
7613 /* "!(x == y)" -> "x != y". */
7614 ASSERT_EQ (mgr.get_or_create_unaryop
7615 (boolean_type_node, TRUTH_NOT_EXPR,
7616 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
7617 x_init, y_init)),
7618 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
7619 x_init, y_init));
7620 /* "!(x > y)" -> "x <= y". */
7621 ASSERT_EQ (mgr.get_or_create_unaryop
7622 (boolean_type_node, TRUTH_NOT_EXPR,
7623 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
7624 x_init, y_init)),
7625 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
7626 x_init, y_init));
7627}
7628
7629/* Verify that binops on constant svalues are folded. */
7630
7631static void
7632test_binop_svalue_folding ()
7633{
7634#define NUM_CSTS 10
7635 tree cst_int[NUM_CSTS];
7636 region_model_manager mgr;
7637 const svalue *cst_sval[NUM_CSTS];
7638 for (int i = 0; i < NUM_CSTS; i++)
7639 {
7640 cst_int[i] = build_int_cst (integer_type_node, i);
7641 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
7642 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
7643 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
7644 }
7645
7646 for (int i = 0; i < NUM_CSTS; i++)
7647 for (int j = 0; j < NUM_CSTS; j++)
7648 {
7649 if (i != j)
7650 ASSERT_NE (cst_sval[i], cst_sval[j]);
7651 if (i + j < NUM_CSTS)
7652 {
7653 const svalue *sum
7654 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7655 cst_sval[i], cst_sval[j]);
7656 ASSERT_EQ (sum, cst_sval[i + j]);
7657 }
7658 if (i - j >= 0)
7659 {
7660 const svalue *difference
7661 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
7662 cst_sval[i], cst_sval[j]);
7663 ASSERT_EQ (difference, cst_sval[i - j]);
7664 }
7665 if (i * j < NUM_CSTS)
7666 {
7667 const svalue *product
7668 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7669 cst_sval[i], cst_sval[j]);
7670 ASSERT_EQ (product, cst_sval[i * j]);
7671 }
7672 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
7673 cst_sval[i], cst_sval[j]);
7674 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
7675 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
7676 cst_sval[i], cst_sval[j]);
7677 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
7678 // etc
7679 }
7680
7681 tree x = build_global_decl ("x", integer_type_node);
7682
7683 test_region_model_context ctxt;
7684 region_model model (&mgr);
7685 const svalue *x_init = model.get_rvalue (x, &ctxt);
7686
7687 /* PLUS_EXPR folding. */
7688 const svalue *x_init_plus_zero
7689 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7690 x_init, cst_sval[0]);
7691 ASSERT_EQ (x_init_plus_zero, x_init);
7692 const svalue *zero_plus_x_init
7693 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7694 cst_sval[0], x_init);
7695 ASSERT_EQ (zero_plus_x_init, x_init);
7696
7697 /* MULT_EXPR folding. */
7698 const svalue *x_init_times_zero
7699 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7700 x_init, cst_sval[0]);
7701 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
7702 const svalue *zero_times_x_init
7703 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7704 cst_sval[0], x_init);
7705 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
7706
7707 const svalue *x_init_times_one
7708 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7709 x_init, cst_sval[1]);
7710 ASSERT_EQ (x_init_times_one, x_init);
7711 const svalue *one_times_x_init
7712 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7713 cst_sval[1], x_init);
7714 ASSERT_EQ (one_times_x_init, x_init);
7715
7716 // etc
7717 // TODO: do we want to use the match-and-simplify DSL for this?
7718
7719 /* Verify that binops put any constants on the RHS. */
7720 const svalue *four_times_x_init
7721 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7722 cst_sval[4], x_init);
7723 const svalue *x_init_times_four
7724 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7725 x_init, cst_sval[4]);
7726 ASSERT_EQ (four_times_x_init, x_init_times_four);
7727 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
7728 ASSERT_EQ (binop->get_op (), MULT_EXPR);
7729 ASSERT_EQ (binop->get_arg0 (), x_init);
7730 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
7731
7732 /* Verify that ((x + 1) + 1) == (x + 2). */
7733 const svalue *x_init_plus_one
7734 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7735 x_init, cst_sval[1]);
7736 const svalue *x_init_plus_two
7737 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7738 x_init, cst_sval[2]);
7739 const svalue *x_init_plus_one_plus_one
7740 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7741 x_init_plus_one, cst_sval[1]);
7742 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
7743
7744 /* Verify various binops on booleans. */
7745 {
7746 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
7747 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
7748 const svalue *sval_unknown
7749 = mgr.get_or_create_unknown_svalue (boolean_type_node);
7750 const placeholder_svalue sval_placeholder (mgr.alloc_symbol_id (),
7751 boolean_type_node, "v");
7752 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
7753 {
7754 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7755 sval_true, sval_unknown),
7756 sval_true);
7757 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7758 sval_false, sval_unknown),
7759 sval_unknown);
7760 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7761 sval_false, &sval_placeholder),
7762 &sval_placeholder);
7763 }
7764 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
7765 {
7766 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7767 sval_false, sval_unknown),
7768 sval_false);
7769 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7770 sval_true, sval_unknown),
7771 sval_unknown);
7772 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7773 sval_true, &sval_placeholder),
7774 &sval_placeholder);
7775 }
7776 }
7777}
7778
7779/* Verify that sub_svalues are folded as expected. */
7780
7781static void
7782test_sub_svalue_folding ()
7783{
7784 coord_test ct;
7785 tree c = build_global_decl ("c", ct.m_coord_type);
7786 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7787 c, ct.m_x_field, NULL_TREE);
7788
7789 region_model_manager mgr;
7790 region_model model (&mgr);
7791 test_region_model_context ctxt;
7792 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
7793
7794 /* Verify that sub_svalue of "unknown" simply
7795 yields an unknown. */
7796
7797 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
7798 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
7799 unknown, c_x_reg);
7800 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
7801 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
7802}
7803
7804/* Get BIT within VAL as a symbolic value within MGR. */
7805
7806static const svalue *
7807get_bit (region_model_manager *mgr,
7808 bit_offset_t bit,
7809 unsigned HOST_WIDE_INT val)
7810{
7811 const svalue *inner_svalue
7812 = mgr->get_or_create_int_cst (unsigned_type_node, val);
7813 return mgr->get_or_create_bits_within (boolean_type_node,
7814 bit_range (bit, 1),
7815 inner_svalue);
7816}
7817
7818/* Verify that bits_within_svalues are folded as expected. */
7819
7820static void
7821test_bits_within_svalue_folding ()
7822{
7823 region_model_manager mgr;
7824
7825 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
7826 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
7827
7828 {
7829 const unsigned val = 0x0000;
7830 for (unsigned bit = 0; bit < 16; bit++)
7831 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7832 }
7833
7834 {
7835 const unsigned val = 0x0001;
7836 ASSERT_EQ (get_bit (&mgr, 0, val), one);
7837 for (unsigned bit = 1; bit < 16; bit++)
7838 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7839 }
7840
7841 {
7842 const unsigned val = 0x8000;
7843 for (unsigned bit = 0; bit < 15; bit++)
7844 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7845 ASSERT_EQ (get_bit (&mgr, 15, val), one);
7846 }
7847
7848 {
7849 const unsigned val = 0xFFFF;
7850 for (unsigned bit = 0; bit < 16; bit++)
7851 ASSERT_EQ (get_bit (&mgr, bit, val), one);
7852 }
7853}
7854
7855/* Test that region::descendent_of_p works as expected. */
7856
7857static void
7858test_descendent_of_p ()
7859{
7860 region_model_manager mgr;
7861 const region *stack = mgr.get_stack_region ();
7862 const region *heap = mgr.get_heap_region ();
7863 const region *code = mgr.get_code_region ();
7864 const region *globals = mgr.get_globals_region ();
7865
7866 /* descendent_of_p should return true when used on the region itself. */
7867 ASSERT_TRUE (stack->descendent_of_p (stack));
7868 ASSERT_FALSE (stack->descendent_of_p (heap));
7869 ASSERT_FALSE (stack->descendent_of_p (code));
7870 ASSERT_FALSE (stack->descendent_of_p (globals));
7871
7872 tree x = build_global_decl ("x", integer_type_node);
7873 const region *x_reg = mgr.get_region_for_global (x);
7874 ASSERT_TRUE (x_reg->descendent_of_p (globals));
7875
7876 /* A cast_region should be a descendent of the original region. */
7877 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
7878 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
7879}
7880
7881/* Verify that bit_range_region works as expected. */
7882
7883static void
7884test_bit_range_regions ()
7885{
7886 tree x = build_global_decl ("x", integer_type_node);
7887 region_model_manager mgr;
7888 const region *x_reg = mgr.get_region_for_global (x);
7889 const region *byte0
7890 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
7891 const region *byte1
7892 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
7893 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
7894 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
7895 ASSERT_NE (byte0, byte1);
7896}
7897
7898/* Verify that simple assignments work as expected. */
7899
7900static void
7901test_assignment ()
7902{
7903 tree int_0 = integer_zero_node;
7904 tree x = build_global_decl ("x", integer_type_node);
7905 tree y = build_global_decl ("y", integer_type_node);
7906
7907 /* "x == 0", then use of y, then "y = 0;". */
7908 region_model_manager mgr;
7909 region_model model (&mgr);
7910 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
7911 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
7912 model.set_value (model.get_lvalue (y, NULL),
7913 model.get_rvalue (int_0, NULL),
7914 NULL);
7915 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
7916 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
7917}
7918
7919/* Verify that compound assignments work as expected. */
7920
7921static void
7922test_compound_assignment ()
7923{
7924 coord_test ct;
7925
7926 tree c = build_global_decl ("c", ct.m_coord_type);
7927 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7928 c, ct.m_x_field, NULL_TREE);
7929 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7930 c, ct.m_y_field, NULL_TREE);
7931 tree d = build_global_decl ("d", ct.m_coord_type);
7932 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7933 d, ct.m_x_field, NULL_TREE);
7934 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7935 d, ct.m_y_field, NULL_TREE);
7936
7937 tree int_17 = build_int_cst (integer_type_node, 17);
7938 tree int_m3 = build_int_cst (integer_type_node, -3);
7939
7940 region_model_manager mgr;
7941 region_model model (&mgr);
7942 model.set_value (c_x, int_17, NULL);
7943 model.set_value (c_y, int_m3, NULL);
7944
7945 /* Copy c to d. */
7946 const svalue *sval = model.get_rvalue (c, NULL);
7947 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
7948
7949 /* Check that the fields have the same svalues. */
7950 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
7951 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
7952}
7953
7954/* Verify the details of pushing and popping stack frames. */
7955
7956static void
7957test_stack_frames ()
7958{
7959 tree int_42 = build_int_cst (integer_type_node, 42);
7960 tree int_10 = build_int_cst (integer_type_node, 10);
7961 tree int_5 = build_int_cst (integer_type_node, 5);
7962 tree int_0 = integer_zero_node;
7963
7964 auto_vec <tree> param_types;
7965 tree parent_fndecl = make_fndecl (integer_type_node,
7966 "parent_fn",
7967 param_types);
7968 allocate_struct_function (parent_fndecl, true);
7969
7970 tree child_fndecl = make_fndecl (integer_type_node,
7971 "child_fn",
7972 param_types);
7973 allocate_struct_function (child_fndecl, true);
7974
7975 /* "a" and "b" in the parent frame. */
7976 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7977 get_identifier ("a"),
7978 integer_type_node);
7979 DECL_CONTEXT (a) = parent_fndecl;
7980 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7981 get_identifier ("b"),
7982 integer_type_node);
7983 DECL_CONTEXT (b) = parent_fndecl;
7984 /* "x" and "y" in a child frame. */
7985 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7986 get_identifier ("x"),
7987 integer_type_node);
7988 DECL_CONTEXT (x) = child_fndecl;
7989 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7990 get_identifier ("y"),
7991 integer_type_node);
7992 DECL_CONTEXT (y) = child_fndecl;
7993
7994 /* "p" global. */
7995 tree p = build_global_decl ("p", ptr_type_node);
7996
7997 /* "q" global. */
7998 tree q = build_global_decl ("q", ptr_type_node);
7999
8000 region_model_manager mgr;
8001 test_region_model_context ctxt;
8002 region_model model (&mgr);
8003
8004 /* Push stack frame for "parent_fn". */
8005 const region *parent_frame_reg
8006 = model.push_frame (*DECL_STRUCT_FUNCTION (parent_fndecl),
8007 NULL, &ctxt);
8008 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
8009 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
8010 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
8011 model.set_value (a_in_parent_reg,
8012 model.get_rvalue (int_42, &ctxt),
8013 &ctxt);
8014 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
8015
8016 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
8017 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
8018 tristate (tristate::TS_TRUE));
8019
8020 /* Push stack frame for "child_fn". */
8021 const region *child_frame_reg
8022 = model.push_frame (*DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
8023 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
8024 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
8025 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
8026 model.set_value (x_in_child_reg,
8027 model.get_rvalue (int_0, &ctxt),
8028 &ctxt);
8029 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
8030
8031 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
8032 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
8033 tristate (tristate::TS_TRUE));
8034
8035 /* Point a global pointer at a local in the child frame: p = &x. */
8036 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
8037 model.set_value (p_in_globals_reg,
8038 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
8039 &ctxt);
8040 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
8041
8042 /* Point another global pointer at p: q = &p. */
8043 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
8044 model.set_value (q_in_globals_reg,
8045 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
8046 &ctxt);
8047
8048 /* Test region::descendent_of_p. */
8049 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
8050 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
8051 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
8052
8053 /* Pop the "child_fn" frame from the stack. */
8054 model.pop_frame (NULL, NULL, &ctxt);
8055 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
8056 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
8057
8058 /* Verify that p (which was pointing at the local "x" in the popped
8059 frame) has been poisoned. */
8060 const svalue *new_p_sval = model.get_rvalue (p, NULL);
8061 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
8062 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
8063 POISON_KIND_POPPED_STACK);
8064
8065 /* Verify that q still points to p, in spite of the region
8066 renumbering. */
8067 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
8068 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
8069 ASSERT_EQ (new_q_sval->maybe_get_region (),
8070 model.get_lvalue (p, &ctxt));
8071
8072 /* Verify that top of stack has been updated. */
8073 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
8074
8075 /* Verify locals in parent frame. */
8076 /* Verify "a" still has its value. */
8077 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
8078 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
8079 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
8080 int_42);
8081 /* Verify "b" still has its constraint. */
8082 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
8083 tristate (tristate::TS_TRUE));
8084}
8085
8086/* Verify that get_representative_path_var works as expected, that
8087 we can map from regions to parms and back within a recursive call
8088 stack. */
8089
8090static void
8091test_get_representative_path_var ()
8092{
8093 auto_vec <tree> param_types;
8094 tree fndecl = make_fndecl (integer_type_node,
8095 "factorial",
8096 param_types);
8097 allocate_struct_function (fndecl, true);
8098
8099 /* Parm "n". */
8100 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8101 get_identifier ("n"),
8102 integer_type_node);
8103 DECL_CONTEXT (n) = fndecl;
8104
8105 region_model_manager mgr;
8106 test_region_model_context ctxt;
8107 region_model model (&mgr);
8108
8109 /* Push 5 stack frames for "factorial", each with a param */
8110 auto_vec<const region *> parm_regs;
8111 auto_vec<const svalue *> parm_svals;
8112 for (int depth = 0; depth < 5; depth++)
8113 {
8114 const region *frame_n_reg
8115 = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
8116 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
8117 parm_regs.safe_push (parm_n_reg);
8118
8119 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
8120 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
8121 parm_svals.safe_push (sval_n);
8122 }
8123
8124 /* Verify that we can recognize that the regions are the parms,
8125 at every depth. */
8126 for (int depth = 0; depth < 5; depth++)
8127 {
8128 {
8129 svalue_set visited;
8130 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
8131 &visited),
8132 path_var (n, depth + 1));
8133 }
8134 /* ...and that we can lookup lvalues for locals for all frames,
8135 not just the top. */
8136 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
8137 parm_regs[depth]);
8138 /* ...and that we can locate the svalues. */
8139 {
8140 svalue_set visited;
8141 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
8142 &visited),
8143 path_var (n, depth + 1));
8144 }
8145 }
8146}
8147
8148/* Ensure that region_model::operator== works as expected. */
8149
8150static void
8151test_equality_1 ()
8152{
8153 tree int_42 = build_int_cst (integer_type_node, 42);
8154 tree int_17 = build_int_cst (integer_type_node, 17);
8155
8156 /* Verify that "empty" region_model instances are equal to each other. */
8157 region_model_manager mgr;
8158 region_model model0 (&mgr);
8159 region_model model1 (&mgr);
757bf1df 8160 ASSERT_EQ (model0, model1);
8161
8162 /* Verify that setting state in model1 makes the models non-equal. */
8163 tree x = build_global_decl ("x", integer_type_node);
8164 model0.set_value (x, int_42, NULL);
8165 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
8166 ASSERT_NE (model0, model1);
8167
8168 /* Verify the copy-ctor. */
8169 region_model model2 (model0);
8170 ASSERT_EQ (model0, model2);
8171 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
8172 ASSERT_NE (model1, model2);
8173
8174 /* Verify that models obtained from copy-ctor are independently editable
8175 w/o affecting the original model. */
8176 model2.set_value (x, int_17, NULL);
8177 ASSERT_NE (model0, model2);
8178 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
8179 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
8180}
8181
8182/* Verify that region models for
8183 x = 42; y = 113;
8184 and
8185 y = 113; x = 42;
8186 are equal. */
8187
8188static void
8189test_canonicalization_2 ()
8190{
8191 tree int_42 = build_int_cst (integer_type_node, 42);
8192 tree int_113 = build_int_cst (integer_type_node, 113);
8193 tree x = build_global_decl ("x", integer_type_node);
8194 tree y = build_global_decl ("y", integer_type_node);
8195
8196 region_model_manager mgr;
8197 region_model model0 (&mgr);
8198 model0.set_value (model0.get_lvalue (x, NULL),
8199 model0.get_rvalue (int_42, NULL),
8200 NULL);
8201 model0.set_value (model0.get_lvalue (y, NULL),
8202 model0.get_rvalue (int_113, NULL),
8203 NULL);
8204
8205 region_model model1 (&mgr);
8206 model1.set_value (model1.get_lvalue (y, NULL),
8207 model1.get_rvalue (int_113, NULL),
8208 NULL);
8209 model1.set_value (model1.get_lvalue (x, NULL),
8210 model1.get_rvalue (int_42, NULL),
8211 NULL);
8212
8213 ASSERT_EQ (model0, model1);
8214}
8215
8216/* Verify that constraints for
8217 x > 3 && y > 42
8218 and
8219 y > 42 && x > 3
8220 are equal after canonicalization. */
8221
8222static void
8223test_canonicalization_3 ()
8224{
8225 tree int_3 = build_int_cst (integer_type_node, 3);
8226 tree int_42 = build_int_cst (integer_type_node, 42);
8227 tree x = build_global_decl ("x", integer_type_node);
8228 tree y = build_global_decl ("y", integer_type_node);
8229
8230 region_model_manager mgr;
8231 region_model model0 (&mgr);
8232 model0.add_constraint (x, GT_EXPR, int_3, NULL);
8233 model0.add_constraint (y, GT_EXPR, int_42, NULL);
8234
8235 region_model model1 (&mgr);
8236 model1.add_constraint (y, GT_EXPR, int_42, NULL);
8237 model1.add_constraint (x, GT_EXPR, int_3, NULL);
8238
8239 model0.canonicalize ();
8240 model1.canonicalize ();
8241 ASSERT_EQ (model0, model1);
8242}
8243
8244/* Verify that we can canonicalize a model containing NaN and other real
8245 constants. */
8246
8247static void
8248test_canonicalization_4 ()
8249{
8250 auto_vec<tree> csts;
8251 append_interesting_constants (&csts);
8252
8253 region_model_manager mgr;
8254 region_model model (&mgr);
8255
8256 for (tree cst : csts)
8257 model.get_rvalue (cst, NULL);
8258
8259 model.canonicalize ();
8260}
8261
8262/* Assert that if we have two region_model instances
8263 with values VAL_A and VAL_B for EXPR that they are
8264 mergable. Write the merged model to *OUT_MERGED_MODEL,
8265 and the merged svalue ptr to *OUT_MERGED_SVALUE.
8266 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
8267 for that region_model. */
8268
8269static void
8270assert_region_models_merge (tree expr, tree val_a, tree val_b,
8271 region_model *out_merged_model,
8272 const svalue **out_merged_svalue)
757bf1df 8273{
8274 region_model_manager *mgr = out_merged_model->get_manager ();
8275 program_point point (program_point::origin (*mgr));
8276 test_region_model_context ctxt;
8277 region_model model0 (mgr);
8278 region_model model1 (mgr);
8279 if (val_a)
8280 model0.set_value (model0.get_lvalue (expr, &ctxt),
8281 model0.get_rvalue (val_a, &ctxt),
8282 &ctxt);
8283 if (val_b)
8284 model1.set_value (model1.get_lvalue (expr, &ctxt),
8285 model1.get_rvalue (val_b, &ctxt),
8286 &ctxt);
8287
8288 /* They should be mergeable. */
8289 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
8290 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
8291}

/* Verify that we can merge region_model instances.  */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (mgr.alloc_symbol_id (),
					 integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions.  */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances.  */
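
/* Merging over-approximates the union of the two states, so only
   constraints implied by both inputs survive: e.g. merging
   {x == y, 0 <= x < n} with {z != 5, 0 <= x < n} should keep just
   0 <= x < n, as the assertions at the end of the function check.  */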

static void
test_constraint_merging ()
{
  tree int_0 = integer_zero_node;
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */
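
/* For example, with an ascending widening value w = WIDENED (0, 1)
   (modeling a series that starts at 0 and has been seen to increase),
   conditions that hold for every value >= 0 are definite ("w >= 0" is
   true, "w < -1" is false), whereas comparisons against values the
   series might reach, such as "w < 256", are unknown; this informally
   summarizes the assertions below.  */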

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = integer_zero_node;
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = integer_one_node;
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
     i_11 = PHI <i_15(2), i_23(3)>
     if (i_11 <= 255)
       goto <bb 3>;
     else
       goto [AFTER LOOP]

    <bb 3> :
     [LOOP BODY]
     i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
	 [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
       T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
       F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = integer_zero_node;
  tree int_1 = integer_one_node;
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;".  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */
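
/* At the C level this corresponds roughly to (illustrative sketch,
   not part of the test itself):
     void *p = malloc (sz);
     char *q = (char *)p;
     if (p != NULL)
       ...here q should be known to be non-NULL too...  */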

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

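/* Test of read/write of array elements, both via constant indices
   ("arr[0]", "arr[1]") and via a symbolic index ("arr[i]"); a write
   through a symbolic index should invalidate earlier concrete
   bindings, and vice versa.  */
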
static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = integer_zero_node;
  tree int_1 = integer_one_node;

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]".  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    const svalue *offset_sval = offset.get_symbolic_byte_offset ();
    if (const svalue *cast = offset_sval->maybe_undo_cast ())
      offset_sval = cast;
    ASSERT_EQ (offset_sval->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF.  */

static void
test_mem_ref ()
{
  /*
     x = 17;
     p = &x;
     *p;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
  tree offset_0 = build_int_cst (ptype, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
  tree offset_0 = build_int_cst (ptype, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */
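
/* Roughly equivalent C (illustrative sketch, not part of the test
   itself):
     int *p;
     void test_fn (void) { p = alloca (n * 4); }
   where, once the frame for test_fn is popped, "p" holds a dangling
   pointer, which the model represents as a poisoned value.  */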

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */
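
/* For instance, the initial value of "*p" involves the initial value
   of "p" (the loaded value depends on the pointer), but not that of
   "q", and the reverse does not hold; this informally summarizes the
   assertions below.  */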

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */