/* Classes for modeling the state of memory.
   Copyright (C) 2019-2024 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#define INCLUDE_MEMORY
#define INCLUDE_ALGORITHM
#include "system.h"
#include "coretypes.h"
#include "make-unique.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "bitmap.h"
#include "selftest.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "analyzer/call-summary.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"
#include "gcc-rich-location.h"
#include "analyzer/checker-event.h"
#include "analyzer/checker-path.h"
#include "analyzer/feasible-graph.h"
#include "analyzer/record-layout.h"
#include "diagnostic-format-sarif.h"

#if ENABLE_ANALYZER

namespace ana {

auto_vec<pop_frame_callback> region_model::pop_frame_callbacks;

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  if (!t)
    return;
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Print EXPR to PP, without quotes.
   For use within svalue::maybe_print_for_user
   and region::maybe_print_for_user.  */

void
print_expr_for_user (pretty_printer *pp, tree expr)
{
  /* Workaround for C++'s lang_hooks.decl_printable_name,
     which unhelpfully (for us) prefixes the decl with its
     type.  */
  if (DECL_P (expr))
    dump_generic_node (pp, expr, 0, TDF_SLIM, 0);
  else
    pp_printf (pp, "%E", expr);
}

/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
        return false;
      if (sval != *other_slot)
        return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
                                 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (i > 0)
        pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Generate a JSON value for this region_to_value_map.
   This is intended for debugging the analyzer rather than
   serialization.  */

json::object *
region_to_value_map::to_json () const
{
  json::object *map_obj = new json::object ();

  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);

  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      label_text reg_desc = reg->get_desc ();
      const svalue *sval = *get (reg);
      map_obj->set (reg_desc.get (), sval->to_json ());
    }

  return map_obj;
}

/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection.

   Reject merger of different values.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
                                       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const * other_slot = other.get (iter_reg);
      if (other_slot)
        {
          if (iter_sval == *other_slot)
            out->put (iter_reg, iter_sval);
          else
            return false;
        }
    }
  return true;
}
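
/* Illustrative sketch (an editor's annotation, not part of the GCC
   sources): merging
     this:  {r1: sv_a, r2: sv_b}
     other: {r1: sv_a, r3: sv_c}
   succeeds with OUT = {r1: sv_a}, the intersection of the two maps,
   whereas merging
     this:  {r1: sv_a}
     other: {r1: sv_b}
   fails, since r1 is bound to different values.  */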

/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
        to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
                          bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
        pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
                      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Generate a JSON value for this region_model.
   This is intended for debugging the analyzer rather than
   serialization.  */

json::object *
region_model::to_json () const
{
  json::object *model_obj = new json::object ();
  model_obj->set ("store", m_store.to_json ());
  model_obj->set ("constraints", m_constraints->to_json ());
  if (m_current_frame)
    model_obj->set ("current_frame", m_current_frame->to_json ());
  model_obj->set ("dynamic_extents", m_dynamic_extents.to_json ());
  return model_obj;
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
                             const region *src_region,
                             tree check_expr)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region),
    m_check_expr (check_expr)
  {}

  const char *get_kind () const final override { return "poisoned_value_diagnostic"; }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
            && m_pkind == other.m_pkind
            && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
      case POISON_KIND_DELETED:
        return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
        return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool terminate_path_p () const final override { return true; }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        {
          ctxt.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
          return ctxt.warn ("use of uninitialized value %qE",
                            m_expr);
        }
        break;
      case POISON_KIND_FREED:
        {
          ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
          return ctxt.warn ("use after %<free%> of %qE",
                            m_expr);
        }
        break;
      case POISON_KIND_DELETED:
        {
          ctxt.add_cwe (416); /* "CWE-416: Use After Free".  */
          return ctxt.warn ("use after %<delete%> of %qE",
                            m_expr);
        }
        break;
      case POISON_KIND_POPPED_STACK:
        {
          /* TODO: which CWE?  */
          return ctxt.warn
            ("dereferencing pointer %qE to within stale stack frame",
             m_expr);
        }
        break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return ev.formatted_print ("use of uninitialized value %qE here",
                                   m_expr);
      case POISON_KIND_FREED:
        return ev.formatted_print ("use after %<free%> of %qE here",
                                   m_expr);
      case POISON_KIND_DELETED:
        return ev.formatted_print ("use after %<delete%> of %qE here",
                                   m_expr);
      case POISON_KIND_POPPED_STACK:
        return ev.formatted_print
          ("dereferencing pointer %qE to within stale stack frame",
           m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

  /* Attempt to suppress false positives.
     Reject paths where the value of the underlying region isn't poisoned.
     This can happen due to state merging when exploring the exploded graph,
     where the more precise analysis during feasibility analysis finds that
     the region is in fact valid.
     To do this we need to get the value from the fgraph.  Unfortunately
     we can't simply query the state of m_src_region (from the enode),
     since it might be a different region in the fnode state (e.g. with
     heap-allocated regions, the numbering could be different).
     Hence we access m_check_expr, if available.  */

  bool check_valid_fpath_p (const feasible_node &fnode,
                            const gimple *emission_stmt)
    const final override
  {
    if (!m_check_expr)
      return true;

    /* We've reached the enode, but not necessarily the right function_point.
       Try to get the state at the correct stmt.  */
    region_model emission_model (fnode.get_model ().get_manager ());
    if (!fnode.get_state_at_stmt (emission_stmt, &emission_model))
      /* Couldn't get state; accept this diagnostic.  */
      return true;

    const svalue *fsval = emission_model.get_rvalue (m_check_expr, NULL);
    /* Check to see if the expr is also poisoned in FNODE (and in the
       same way).  */
    const poisoned_svalue *fspval = fsval->dyn_cast_poisoned_svalue ();
    if (!fspval)
      return false;
    if (fspval->get_poison_kind () != m_pkind)
      return false;
    return true;
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
  tree m_check_expr;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)", m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
                                   int operand_precision,
                                   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && m_operand_precision == other.m_operand_precision
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("shift by count (%qE) >= precision of type (%qi)",
                      m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};

/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
                                  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);

  if (gimple_has_volatile_ops (assign)
      && !gimple_clobber_p (assign))
    {
      conjured_purge p (this, ctxt);
      return m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs),
                                                   assign,
                                                   get_lvalue (lhs, ctxt),
                                                   p);
    }

  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
        /* e.g. "_1 = a_10(D) + 12;" */
        tree ptr = rhs1;
        tree offset = gimple_assign_rhs2 (assign);

        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
           is an integer of type sizetype".  */
        offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
        /* e.g. "_1 = p_2(D) - q_3(D);".  */
        tree rhs2 = gimple_assign_rhs2 (assign);
        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        // TODO: perhaps fold to zero if they're known to be equal?

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    /* Assignments of the form
         set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        /* Unary ops.  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
        return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (TREE_TYPE (lhs) == boolean_type_node)
          {
            /* Consider constraints between svalues.  */
            tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
            if (t.is_known ())
              return m_mgr->get_or_create_constant_svalue
                (t.is_true () ? boolean_true_node : boolean_false_node);
          }

        /* Otherwise, generate a symbolic binary op.  */
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
        /* Binary ops.  */
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
          {
            /* "INT34-C. Do not shift an expression by a negative number of
               bits or by greater than or equal to the number of bits that
               exist in the operand."  */
            if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
              if (TREE_CODE (rhs2_cst) == INTEGER_CST
                  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1)))
                {
                  if (tree_int_cst_sgn (rhs2_cst) < 0)
                    ctxt->warn
                      (make_unique<shift_count_negative_diagnostic>
                         (assign, rhs2_cst));
                  else if (compare_tree_int (rhs2_cst,
                                             TYPE_PRECISION (TREE_TYPE (rhs1)))
                           >= 0)
                    ctxt->warn
                      (make_unique<shift_count_overflow_diagnostic>
                         (assign,
                          int (TYPE_PRECISION (TREE_TYPE (rhs1))),
                          rhs2_cst));
                }
          }

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
      tmp = A | B;
      if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
      if (A) goto L1; else goto L4;
  L1: if (B) goto L2; else goto L4;
  L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
      tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
                               const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
        && gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
        return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
        return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;"

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of a IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.
   SRC_REGION is a hint about where SVAL came from, and can be NULL.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
                                tree expr,
                                const region *src_region,
                                region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
         to initialize.  */
      if (pkind == POISON_KIND_UNINIT
          && sval->get_type ()
          && is_empty_type (sval->get_type ()))
        return sval;

      if (pkind == POISON_KIND_UNINIT)
        if (const gimple *curr_stmt = ctxt->get_stmt ())
          if (const gassign *assign_stmt
                = dyn_cast <const gassign *> (curr_stmt))
            {
              /* Special case to avoid certain false positives.  */
              if (within_short_circuited_stmt_p (this, assign_stmt))
                return sval;

              /* Special case to avoid false positive on
                 -ftrivial-auto-var-init=.  */
              if (due_to_ifn_deferred_init_p (assign_stmt))
                return sval;
            }

      /* If we have an SSA name for a temporary, we don't want to print
         '<unknown>'.
         Poisoned values are shared by type, and so we can't reconstruct
         the tree other than via the def stmts, using
         fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      if (src_region == NULL && pkind == POISON_KIND_UNINIT)
        src_region = get_region_for_poisoned_expr (expr);

      /* Can we reliably get the poisoned value from "expr"?
         This is for use by poisoned_value_diagnostic::check_valid_fpath_p.
         Unfortunately, we might not have a reliable value for EXPR.
         Hence we only query its value now, and only use it if we get the
         poisoned value back again.  */
      tree check_expr = expr;
      const svalue *foo_sval = get_rvalue (expr, NULL);
      if (foo_sval == sval)
        check_expr = expr;
      else
        check_expr = NULL;
      if (ctxt->warn (make_unique<poisoned_value_diagnostic> (diag_arg,
                                                              pkind,
                                                              src_region,
                                                              check_expr)))
        {
          /* We only want to report use of a poisoned value at the first
             place it gets used; return an unknown value to avoid generating
             a chain of followup warnings.  */
          sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
        }

      return sval;
    }

  return sval;
}

/* Attempt to get a region for describing EXPR, the source of region of
   a poisoned_svalue for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
        expr = decl;
      else
        return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Any writes other than to the stack are treated
     as externally visible.  */
  if (ctxt)
    {
      enum memory_space memspace = lhs_reg->get_memory_space ();
      if (memspace != MEMSPACE_STACK)
        ctxt->maybe_did_work ();
    }

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, NULL, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
        if (0)
          sorry_at (assign->location, "unhandled assignment op: %qs",
                    get_tree_code_name (op));
        const svalue *unknown_sval
          = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
        set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
        if (TREE_CLOBBER_P (rhs1))
          {
            /* e.g. "x ={v} {CLOBBER};"  */
            clobber_region (lhs_reg);
          }
        else
          {
            /* Any CONSTRUCTOR that survives to this point is either
               just a zero-init of everything, or a vector.  */
            if (!CONSTRUCTOR_NO_CLEARING (rhs1))
              zero_fill_region (lhs_reg, ctxt);
            unsigned ix;
            tree index;
            tree val;
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
              {
                gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
                if (!index)
                  index = build_int_cst (integer_type_node, ix);
                gcc_assert (TREE_CODE (index) == INTEGER_CST);
                const svalue *index_sval
                  = m_mgr->get_or_create_constant_svalue (index);
                gcc_assert (index_sval);
                const region *sub_reg
                  = m_mgr->get_element_region (lhs_reg,
                                               TREE_TYPE (val),
                                               index_sval);
                const svalue *val_sval = get_rvalue (val, ctxt);
                set_value (sub_reg, val_sval, ctxt);
              }
          }
      }
      break;

    case STRING_CST:
      {
        /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
                           ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
                           bool *out_unknown_side_effects,
                           region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_DEBUG:
      /* We should have stripped these out when building the supergraph.  */
      gcc_unreachable ();
      break;

    case GIMPLE_ASSIGN:
      {
        const gassign *assign = as_a <const gassign *> (stmt);
        on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
        const gasm *asm_stmt = as_a <const gasm *> (stmt);
        on_asm_stmt (asm_stmt, ctxt);
        if (ctxt)
          ctxt->maybe_did_work ();
      }
      break;

    case GIMPLE_CALL:
      {
        /* Track whether we have a gcall to a function that's not recognized
           by anything, for which we don't have a function body, or for which
           we don't know the fndecl.  */
        const gcall *call = as_a <const gcall *> (stmt);
        *out_unknown_side_effects = on_call_pre (call, ctxt);
      }
      break;

    case GIMPLE_RETURN:
      {
        const greturn *return_ = as_a <const greturn *> (stmt);
        on_return (return_, ctxt);
      }
      break;
    }
}

/* Given a call CD with function attribute FORMAT_ATTR, check that the
   format arg to the call is a valid null-terminated string.  */

void
region_model::check_call_format_attr (const call_details &cd,
                                      tree format_attr) const
{
  /* We assume that FORMAT_ATTR has already been validated.  */

  /* arg0 of the attribute should be kind of format strings
     that this function expects (e.g. "printf").  */
  const tree arg0_tree_list = TREE_VALUE (format_attr);
  if (!arg0_tree_list)
    return;

  /* arg1 of the attribute should be the 1-based parameter index
     to treat as the format string.  */
  const tree arg1_tree_list = TREE_CHAIN (arg0_tree_list);
  if (!arg1_tree_list)
    return;
  const tree arg1_value = TREE_VALUE (arg1_tree_list);
  if (!arg1_value)
    return;

  unsigned format_arg_idx = TREE_INT_CST_LOW (arg1_value) - 1;
  if (cd.num_args () <= format_arg_idx)
    return;

  /* Subclass of annotating_context that
     adds a note about the format attr to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
                     unsigned fmt_param_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_fmt_param_idx (fmt_param_idx)
    {
    }
    void add_annotations () final override
    {
      class reason_format_attr
        : public pending_note_subclass<reason_format_attr>
      {
      public:
        reason_format_attr (const call_arg_details &arg_details)
        : m_arg_details (arg_details)
        {
        }

        const char *get_kind () const final override
        {
          return "reason_format_attr";
        }

        void emit () const final override
        {
          inform (DECL_SOURCE_LOCATION (m_arg_details.m_called_fndecl),
                  "parameter %i of %qD marked as a format string"
                  " via %qs attribute",
                  m_arg_details.m_arg_idx + 1, m_arg_details.m_called_fndecl,
                  "format");
        }

        bool operator== (const reason_format_attr &other) const
        {
          return m_arg_details == other.m_arg_details;
        }

      private:
        call_arg_details m_arg_details;
      };

      call_arg_details arg_details (m_cd, m_fmt_param_idx);
      add_note (make_unique<reason_format_attr> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_fmt_param_idx;
  };

  annotating_ctxt my_ctxt (cd, format_arg_idx);
  call_details my_cd (cd, &my_ctxt);
  my_cd.check_for_null_terminated_string_arg (format_arg_idx);
}
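
/* Illustrative example (an editor's annotation, not part of the GCC
   sources): for a declaration such as
     extern void log_msg (int level, const char *fmt, ...)
       __attribute__ ((format (printf, 2, 3)));
   arg0 of the attribute is "printf" and arg1 is 2, so format_arg_idx
   above becomes 1, and the "fmt" argument at each call site is checked
   for being a valid null-terminated string.  */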

/* Ensure that all arguments at the call described by CD are checked
   for poisoned values, by calling get_rvalue on each argument.

   Check that calls to functions with "format" attribute have valid
   null-terminated strings for their format argument.  */

void
region_model::check_call_args (const call_details &cd) const
{
  for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
    cd.get_arg_svalue (arg_idx);

  /* Handle attribute "format".  */
  if (tree format_attr = cd.lookup_function_attribute ("format"))
    check_call_format_attr (cd, format_attr);
}

/* Update this model for an outcome of a call that returns a specific
   integer constant.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_int_cst_return (const call_details &cd,
                                         int retval,
                                         bool unmergeable)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  const svalue *result
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), retval);
  if (unmergeable)
    result = m_mgr->get_or_create_unmergeable (result);
  set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
}

/* Update this model for an outcome of a call that returns zero.
   If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
   the state-merger code from merging success and failure outcomes.  */

void
region_model::update_for_zero_return (const call_details &cd,
                                      bool unmergeable)
{
  update_for_int_cst_return (cd, 0, unmergeable);
}

/* Update this model for an outcome of a call that returns non-zero.
   Specifically, assign an svalue to the LHS, and add a constraint that
   that svalue is non-zero.  */

void
region_model::update_for_nonzero_return (const call_details &cd)
{
  if (!cd.get_lhs_type ())
    return;
  if (TREE_CODE (cd.get_lhs_type ()) != INTEGER_TYPE)
    return;
  cd.set_any_lhs_with_defaults ();
  const svalue *zero
    = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
  const svalue *result
    = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
  add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
}
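
/* Illustrative sketch (an editor's annotation, not part of the GCC
   sources): a known_function modeling a call with distinct
   success/failure outcomes might use the helpers above along its two
   paths, e.g.:
     model->update_for_zero_return (cd, true);  // failure outcome
     model->update_for_nonzero_return (cd);     // success outcome
   passing true for UNMERGEABLE on one path so that state merging
   doesn't collapse the success and failure states back together.  */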

/* Subroutine of region_model::maybe_get_copy_bounds.
   The Linux kernel commonly uses
     min_t([unsigned] long, VAR, sizeof(T));
   to set an upper bound on the size of a copy_to_user.
   Attempt to simplify such sizes by trying to get the upper bound as a
   constant.
   Return the simplified svalue if possible, or NULL otherwise.  */

static const svalue *
maybe_simplify_upper_bound (const svalue *num_bytes_sval,
                            region_model_manager *mgr)
{
  tree type = num_bytes_sval->get_type ();
  while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
    num_bytes_sval = raw;
  if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
    if (binop_sval->get_op () == MIN_EXPR)
      if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
        {
          return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
          /* TODO: we might want to also capture the constraint
             when recording the diagnostic, or note that we're using
             the upper bound.  */
        }
  return NULL;
}
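
/* Illustrative example (an editor's annotation, not part of the GCC
   sources): for kernel-style code such as
     n = min_t(unsigned long, len, sizeof (buf));
   the size is modeled as a MIN_EXPR binop_svalue whose second argument
   is the constant sizeof (buf); the function above returns that
   constant (cast back to the original type) as the upper bound.  */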

/* Attempt to get an upper bound for the size of a copy when simulating a
   copy function.

   NUM_BYTES_SVAL is the symbolic value for the size of the copy.
   Use it if it's constant, otherwise try to simplify it.  Failing
   that, use the size of SRC_REG if constant.

   Return a symbolic value for an upper limit on the number of bytes
   copied, or NULL if no such value could be determined.  */

const svalue *
region_model::maybe_get_copy_bounds (const region *src_reg,
                                     const svalue *num_bytes_sval)
{
  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  if (const svalue *simplified
        = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
    num_bytes_sval = simplified;

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* For now, try just guessing the size as the capacity of the
     base region of the src.
     This is a hack; we might get too large a value.  */
  const region *src_base_reg = src_reg->get_base_region ();
  num_bytes_sval = get_capacity (src_base_reg);

  if (num_bytes_sval->maybe_get_constant ())
    return num_bytes_sval;

  /* Non-constant: give up.  */
  return NULL;
}
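
/* Illustrative sketch (an editor's annotation, not part of the GCC
   sources): a caller simulating a copy of N bytes from SRC_REG might
   use this as:
     if (const svalue *limit = maybe_get_copy_bounds (src_reg, n_sval))
       ...check the copy against the constant LIMIT...
     else
       ...no bound was found; treat the copied size as unknown...  */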

/* Get any known_function for FNDECL for call CD.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no known_function is found, or it does not match the
   assumption(s).  */

const known_function *
region_model::get_known_function (tree fndecl, const call_details &cd) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_match (fndecl, cd);
}

/* Get any known_function for IFN, or NULL.  */

const known_function *
region_model::get_known_function (enum internal_fn ifn) const
{
  known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
  return known_fn_mgr->get_internal_fn (ifn);
}

/* Get any builtin_known_function for CALL, emitting any warning to CTXT
   if not NULL.

   The call must match all assumptions made by the known_function (such as
   e.g. "argument 1's type must be a pointer type").

   Return NULL if no builtin_known_function is found, or it does
   not match the assumption(s).

   Internally calls get_known_function to find a known_function and cast it
   to a builtin_known_function.

   For instance, calloc is a C builtin, defined in gcc/builtins.def
   by the DEF_LIB_BUILTIN macro.  Such builtins are recognized by the
   analyzer by their name, so that they are still recognized as builtins
   even in C++, or when the user redeclares them with a mismatching
   signature.

   Cases when a supposed builtin is not flagged as one by the FE:

    The C++ FE does not recognize calloc as a builtin if it has not been
    included from a standard header, but the C FE does.  Hence in C++ if
    CALL comes from a calloc and stdlib is not included,
    gcc/tree.h:fndecl_built_in_p (CALL) would be false.

    In C code, a __SIZE_TYPE__ calloc (__SIZE_TYPE__, __SIZE_TYPE__) user
    declaration obviously has a signature mismatching the standard's, and
    its function_decl tree won't be unified by
    gcc/c-decl.cc:match_builtin_function_types.

   Yet in both cases the analyzer should treat the calls as a builtin calloc
   so that extra attributes unspecified by the standard but added by GCC
   (e.g. sprintf attributes in gcc/builtins.def), useful for the detection of
   dangerous behavior, are indeed processed.

   Therefore for those cases when a "builtin flag" is not added by the FE,
   builtins' kf are derived from builtin_known_function, whose method
   builtin_known_function::builtin_decl returns the builtin's
   function_decl tree as defined in gcc/builtins.def, with all the extra
   attributes.  */

const builtin_known_function *
region_model::get_builtin_kf (const gcall *call,
                              region_model_context *ctxt /* = NULL */) const
{
  region_model *mut_this = const_cast <region_model *> (this);
  tree callee_fndecl = mut_this->get_fndecl_for_call (call, ctxt);
  if (! callee_fndecl)
    return NULL;

  call_details cd (call, mut_this, ctxt);
  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    return kf->dyn_cast_builtin_kf ();

  return NULL;
}

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  call_details cd (call, this, ctxt);

  /* Special-case for IFN_DEFERRED_INIT.
     We want to report uninitialized variables with -fanalyzer (treating
     -ftrivial-auto-var-init= as purely a mitigation feature).
     Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
     lhs of the call, so that it is still uninitialized from the point of
     view of the analyzer.  */
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return false; /* No side effects.  */

  /* Get svalues for all of the arguments at the callsite, to ensure that we
     complain about any uninitialized arguments.  This might lead to
     duplicates if any of the handling below also looks up the svalues,
     but the deduplication code should deal with that.  */
  if (ctxt)
    check_call_args (cd);

  tree callee_fndecl = get_fndecl_for_call (call, ctxt);

  if (gimple_call_internal_p (call))
    if (const known_function *kf
	  = get_known_function (gimple_call_internal_fn (call)))
      {
	kf->impl_call_pre (cd);
	return false; /* No further side effects.  */
      }

  if (!callee_fndecl)
    {
      cd.set_any_lhs_with_defaults ();
      return true; /* Unknown side effects.  */
    }

  if (const known_function *kf = get_known_function (callee_fndecl, cd))
    {
      kf->impl_call_pre (cd);
      return false; /* No further side effects.  */
    }

  cd.set_any_lhs_with_defaults ();

  const int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
  if (callee_fndecl_flags & (ECF_CONST | ECF_PURE))
    return false; /* No side effects.  */

  if (fndecl_built_in_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  if (!fndecl_has_gimple_body_p (callee_fndecl))
    return true; /* Unknown side effects.  */

  return false; /* No side effects.  */
}
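
/* For example (a sketch of the cases above): a call such as

     extern void mystery_fn (int *p);   (hypothetical: no known_function)
     mystery_fn (&i);

   is not recognized and has no gimple body, so we return true (unknown
   side effects), whereas a call handled by a known_function, or to a
   fndecl marked ECF_CONST or ECF_PURE, returns false.  */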

/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);
      if (const known_function *kf = get_known_function (callee_fndecl, cd))
	{
	  kf->impl_call_post (cd);
	  return;
	}
      /* Was this fndecl referenced by
	 __attribute__((malloc(FOO)))?  */
      if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
	{
	  impl_deallocation_call (cd);
	  return;
	}
    }

  if (unknown_side_effects)
    {
      handle_unrecognized_call (call, ctxt);
      if (ctxt)
	ctxt->maybe_did_work ();
    }
}
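
/* A hypothetical example of the "*dealloc" case above:

     void release (void *p);
     void *acquire (void)
       __attribute__ ((malloc (release)));

   the front end records "release" via the internal "*dealloc" attribute,
   so a call to "release" is treated as a deallocation call via
   impl_deallocation_call.  */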

/* Purge state involving SVAL from this region_model, using CTXT
   (if non-NULL) to purge other state in a program_state.

   For example, if we're at the def-stmt of an SSA name, then we need to
   purge any state for svalues that involve that SSA name.  This avoids
   false positives in loops, since a symbolic value referring to the
   SSA name will be referring to the previous value of that SSA name.

   For example, in:
     while ((e = hashmap_iter_next(&iter))) {
       struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
       free (e_strbuf->value);
     }
   at the def-stmt of e_8:
     e_8 = hashmap_iter_next (&iter);
   we should purge the "freed" state of:
     INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
   which is the "e_strbuf->value" value from the previous iteration,
   or we will erroneously report a double-free - the "e_8" within it
   refers to the previous value.  */

void
region_model::purge_state_involving (const svalue *sval,
				     region_model_context *ctxt)
{
  if (!sval->can_have_associated_state_p ())
    return;
  m_store.purge_state_involving (sval, m_mgr);
  m_constraints->purge_state_involving (sval);
  m_dynamic_extents.purge_state_involving (sval);
  if (ctxt)
    ctxt->purge_state_involving (sval);
}

/* A pending_note subclass for adding a note about an
   __attribute__((access, ...)) to a diagnostic.  */

class reason_attr_access : public pending_note_subclass<reason_attr_access>
{
public:
  reason_attr_access (tree callee_fndecl, const attr_access &access)
  : m_callee_fndecl (callee_fndecl),
    m_ptr_argno (access.ptrarg),
    m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
  {
  }

  const char *get_kind () const final override { return "reason_attr_access"; }

  void emit () const final override
  {
    inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
	    "parameter %i of %qD marked with attribute %qs",
	    m_ptr_argno + 1, m_callee_fndecl, m_access_str);
  }

  bool operator== (const reason_attr_access &other) const
  {
    return (m_callee_fndecl == other.m_callee_fndecl
	    && m_ptr_argno == other.m_ptr_argno
	    && !strcmp (m_access_str, other.m_access_str));
  }

private:
  tree m_callee_fndecl;
  unsigned m_ptr_argno;
  const char *m_access_str;
};

/* Check CALL, a call to external function CALLEE_FNDECL, based on
   any __attribute__ ((access, ...)) on the latter, complaining to
   CTXT about any issues.

   Currently we merely call check_region_for_write on any regions
   pointed to by arguments marked with a "write_only" or "read_write"
   attribute.  */

void
region_model::check_function_attr_access (const gcall *call,
					  tree callee_fndecl,
					  region_model_context *ctxt,
					  rdwr_map &rdwr_idx) const
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  unsigned argno = 0;

  for (tree iter = TYPE_ARG_TYPES (fntype); iter;
       iter = TREE_CHAIN (iter), ++argno)
    {
      const attr_access* access = rdwr_idx.get (argno);
      if (!access)
	continue;

      /* Ignore any duplicate entry in the map for the size argument.  */
      if (access->ptrarg != argno)
	continue;

      if (access->mode == access_write_only
	  || access->mode == access_read_write)
	{
	  /* Subclass of annotating_context that
	     adds a note about the attr access to any saved diagnostics.  */
	  class annotating_ctxt : public annotating_context
	  {
	  public:
	    annotating_ctxt (tree callee_fndecl,
			     const attr_access &access,
			     region_model_context *ctxt)
	    : annotating_context (ctxt),
	      m_callee_fndecl (callee_fndecl),
	      m_access (access)
	    {
	    }
	    void add_annotations () final override
	    {
	      add_note (make_unique<reason_attr_access>
			  (m_callee_fndecl, m_access));
	    }
	  private:
	    tree m_callee_fndecl;
	    const attr_access &m_access;
	  };

	  /* Use this ctxt below so that any diagnostics get the
	     note added to them.  */
	  annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);

	  tree ptr_tree = gimple_call_arg (call, access->ptrarg);
	  const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
	  const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
	  check_region_for_write (reg, nullptr, &my_ctxt);
	  /* We don't use the size arg for now.  */
	}
    }
}
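
/* A hypothetical declaration exercising the above:

     void fill_buf (void *buf, size_t n)
       __attribute__ ((access (write_only, 1, 2)));

   here RDWR_IDX maps argument 0 ("buf", using 0-based indices) to a
   write_only attr_access, so we check that the pointed-to region is
   writable; the size argument ("n") is currently unused.  */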

/* Subroutine of region_model::check_function_attr_null_terminated_string_arg,
   checking one instance of __attribute__((null_terminated_string_arg)).  */

void
region_model::
check_one_function_attr_null_terminated_string_arg (const gcall *call,
						    tree callee_fndecl,
						    region_model_context *ctxt,
						    rdwr_map &rdwr_idx,
						    tree attr)
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);
  gcc_assert (attr);

  tree arg = TREE_VALUE (attr);
  if (!arg)
    return;

  /* Convert from 1-based to 0-based index.  */
  unsigned int arg_idx = TREE_INT_CST_LOW (TREE_VALUE (arg)) - 1;

  /* If there's also an "access" attribute on the ptr param
     for reading with a size param specified, then that size
     limits the size of the possible read from the pointer.  */
  if (const attr_access* access = rdwr_idx.get (arg_idx))
    if ((access->mode == access_read_only
	 || access->mode == access_read_write)
	&& access->sizarg != UINT_MAX)
      {
	call_details cd_checked (call, this, ctxt);
	const svalue *limit_sval
	  = cd_checked.get_arg_svalue (access->sizarg);
	const svalue *ptr_sval
	  = cd_checked.get_arg_svalue (arg_idx);
	/* Try reading all of the bytes expressed by the size param,
	   but without emitting warnings (via a null context).  */
	const svalue *limited_sval
	  = read_bytes (deref_rvalue (ptr_sval, NULL_TREE, nullptr),
			NULL_TREE,
			limit_sval,
			nullptr);
	if (limited_sval->get_kind () == SK_POISONED)
	  {
	    /* Reading up to the truncation limit caused issues.
	       Assume that the string is meant to be terminated
	       before then, so perform a *checked* check for the
	       terminator.  */
	    check_for_null_terminated_string_arg (cd_checked,
						  arg_idx);
	  }
	else
	  {
	    /* Reading up to the truncation limit seems OK; repeat
	       the read, but with checking enabled.  */
	    read_bytes (deref_rvalue (ptr_sval, NULL_TREE, ctxt),
			NULL_TREE,
			limit_sval,
			ctxt);
	  }
	return;
      }

  /* Otherwise, we don't have an access-attribute limiting the read.
     Simulate a read up to the null terminator (if any).  */

  call_details cd (call, this, ctxt);
  check_for_null_terminated_string_arg (cd, arg_idx);
}
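
/* A hypothetical declaration exercising the above:

     void scan (const char *p, size_t n)
       __attribute__ ((null_terminated_string_arg (1),
		       access (read_only, 1, 2)));

   argument 1 (1-based) must be a null-terminated string, and the
   "access" attribute supplies a size argument, so the read is first
   attempted up to "n" bytes with a null context before deciding how
   to check for the terminator.  */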

/* Check CALL, a call to external function CALLEE_FNDECL, for any uses
   of __attribute__ ((null_terminated_string_arg)), complaining
   to CTXT about any issues.

   Use RDWR_IDX for tracking uses of __attribute__ ((access, ...)).  */

void
region_model::
check_function_attr_null_terminated_string_arg (const gcall *call,
						tree callee_fndecl,
						region_model_context *ctxt,
						rdwr_map &rdwr_idx)
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  gcc_assert (fntype);

  /* A function declaration can specify multiple attribute
     null_terminated_string_arg, each with one argument.  */
  for (tree attr = TYPE_ATTRIBUTES (fntype); attr; attr = TREE_CHAIN (attr))
    {
      attr = lookup_attribute ("null_terminated_string_arg", attr);
      if (!attr)
	return;

      check_one_function_attr_null_terminated_string_arg (call, callee_fndecl,
							  ctxt, rdwr_idx,
							  attr);
    }
}

/* Check CALL, a call to external function CALLEE_FNDECL, for any
   function attributes, complaining to CTXT about any issues.  */

void
region_model::check_function_attrs (const gcall *call,
				    tree callee_fndecl,
				    region_model_context *ctxt)
{
  gcc_assert (call);
  gcc_assert (callee_fndecl);
  gcc_assert (ctxt);

  tree fntype = TREE_TYPE (callee_fndecl);
  if (!fntype)
    return;

  if (!TYPE_ATTRIBUTES (fntype))
    return;

  /* Initialize a map of attribute access specifications for arguments
     to the function call.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));

  check_function_attr_access (call, callee_fndecl, ctxt, rdwr_idx);
  check_function_attr_null_terminated_string_arg (call, callee_fndecl,
						  ctxt, rdwr_idx);
}

/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  if (fndecl && ctxt)
    check_function_attrs (call, fndecl, ctxt);

  reachable_regions reachable_regs (this);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      if (ctxt)
	ctxt->on_unknown_change (sval, true);
      if (uncertainty)
	uncertainty->on_mutable_sval_at_unknown_call (sval);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters (ctxt);

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
			     conjured_purge (this, ctxt));

  /* Purge dynamic extents from any regions that have escaped mutably:
     realloc could have been called on them.  */
  for (hash_set<const region *>::iterator
	 iter = reachable_regs.begin_mutable_base_regs ();
       iter != reachable_regs.end_mutable_base_regs ();
       ++iter)
    {
      const region *base_reg = (*iter);
      unset_dynamic_extents (base_reg);
    }
}
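
/* For example (a sketch):

     int g;
     void unknown_fn (int *);   (hypothetical extern with no body)
     ...
     int local;
     unknown_fn (&local);

   after the call, the clusters for "g" (a global) and "local"
   (reachable via the pointer argument) are given new unknown values,
   and any sm-state for svalues within them is purged.  */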

/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   If UNCERTAINTY is non-NULL, treat any svalues that were recorded
   within it as being maybe-bound as additional "roots" for reachability.

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval,
				     const uncertainty_t *uncertainty)
{
  reachable_regions reachable_regs (this);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  if (uncertainty)
    for (uncertainty_t::iterator iter
	   = uncertainty->begin_maybe_bound_svals ();
	 iter != uncertainty->end_maybe_bound_svals (); ++iter)
      reachable_regs.handle_sval (*iter);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}

/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics.  */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  if (lhs && rhs)
    {
      const svalue *sval = get_rvalue (rhs, ctxt);
      const region *ret_reg = get_lvalue (lhs, ctxt);
      set_value (ret_reg, sval, ctxt);
    }
}

/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/siglongjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const svalue *new_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}

/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt, false);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      const svalue *zero_sval
	= m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  const svalue *one_sval
	    = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
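
/* For example, in:

     if (setjmp (env) == 0)
       longjmp (env, 0);

   the rewind from the longjmp is modeled as setjmp returning 1 (not
   the 0 that was passed), matching the C semantics handled above.  */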

/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Get state from OLD_STATE so that all of the phi stmts for a basic block
   are effectively handled simultaneously.  */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  const region_model &old_state,
			  hash_set<const svalue *> &svals_changing_meaning,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
  const region *dst_reg = old_state.get_lvalue (lhs, ctxt);

  const svalue *sval = old_state.get_rvalue (lhs, nullptr);
  if (sval->get_kind () == SK_WIDENING)
    svals_changing_meaning.add (sval);

  set_value (dst_reg, src_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
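
/* For example, given a phi at a loop header:

     # i_3 = PHI <0(2), i_7(4)>

   when following the edge from basic block 2 we are called with LHS
   "i_3" and RHS "0"; reading the RHS from OLD_STATE ensures that
   another phi in the same block doesn't observe the just-written
   value of i_3.  */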

/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree inner_expr = TREE_OPERAND (expr, 0);
	const region *inner_reg = get_lvalue (inner_expr, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
      }
      break;

    case MEM_REF:
      {
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || VAR_P (expr)
		    || TREE_CODE (expr) == RESULT_DECL);

	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr, ctxt);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}

/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op.  */

static void
assert_compat_types (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    {
#if CHECKING_P
      if (!(useless_type_conversion_p (src_type, dst_type)))
	internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
#endif
    }
}

/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op.  */

bool
compat_types_p (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    if (!(useless_type_conversion_p (src_type, dst_type)))
      return false;
  return true;
}

/* Get the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const region *result_reg = get_lvalue_1 (pv, ctxt);
  assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
  return result_reg;
}

/* Get the region for EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const region *
region_model::get_lvalue (tree expr, region_model_context *ctxt) const
{
  return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR".  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      {
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *reg = get_lvalue (op0, ctxt);
	tree num_bits = TREE_OPERAND (expr, 1);
	tree first_bit_offset = TREE_OPERAND (expr, 2);
	gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
	gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
	bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
			TREE_INT_CST_LOW (num_bits));
	return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
      }

    case VAR_DECL:
      if (DECL_HARD_REGISTER (pv.m_tree))
	{
	  /* If it has a hard register, it doesn't have a memory region
	     and can't be referred to as an lvalue.  */
	  return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
	}
      /* Fall through.  */
    case PARM_DECL:
    case SSA_NAME:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg, ctxt);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      }

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg, ctxt);
      }
    case OBJ_TYPE_REF:
      {
	tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
	return get_rvalue (expr, ctxt);
      }
    }
}

/* Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.  */

const svalue *
region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const svalue *result_sval = get_rvalue_1 (pv, ctxt);

  assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));

  result_sval = check_for_poison (result_sval, pv.m_tree, NULL, ctxt);

  return result_sval;
}

/* Get the value of EXPR within this region_model (assuming the most
   recent stack frame if it's a local).  */

const svalue *
region_model::get_rvalue (tree expr, region_model_context *ctxt) const
{
  return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
}

/* Return true if this model is on a path with "main" as the entrypoint
   (as opposed to one in which we're merely analyzing a subset of the
   path through the code).  */

bool
region_model::called_from_main_p () const
{
  if (!m_current_frame)
    return false;
  /* Determine if the oldest stack frame in this model is for "main".  */
  const frame_region *frame0 = get_frame_at_index (0);
  gcc_assert (frame0);
  return id_equal (DECL_NAME (frame0->get_function ().decl), "main");
}

/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p ()
      && TREE_PUBLIC (decl)
      && !TREE_READONLY (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  */
  if (called_from_main_p () || TREE_READONLY (decl))
    return reg->get_initial_value_at_main (m_mgr);

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
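
/* For example (a sketch):

     static int s = 22;   (static: keeps its initializer)
     int g = 42;          (TREE_PUBLIC: unknown after an unknown call)

   on a path from "main" where neither has yet been touched, reading
   "s" or "g" yields the initializer (22 or 42); after a call to an
   unknown function, "g" reads as unknown, whereas "s" keeps its value
   unless it has escaped.  */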

/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_store_value (const region *reg,
			       region_model_context *ctxt) const
{
  /* Getting the value of an empty region gives an unknown_svalue.  */
  if (reg->empty_p ())
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  bool check_poisoned = true;
  if (check_region_for_read (reg, ctxt))
    check_poisoned = false;

  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = integer_zero_node;
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg, check_poisoned);
}
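
/* For example (a sketch of the special cases above): given

     const char *s = "foo";

   a read of s[1] hits the constant-index case and yields the char 'o'
   from the STRING_CST, rather than an unknown or initial value.  */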

/* Return false if REG does not exist, true if it may do.
   This is for detecting regions within the stack that don't exist anymore
   after frames are popped.  */

bool
region_model::region_exists_p (const region *reg) const
{
  /* If within a stack frame, check that the stack frame is live.  */
  if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
    {
      /* Check that the current frame is the enclosing frame, or is called
	 by it.  */
      for (const frame_region *iter_frame = get_current_frame (); iter_frame;
	   iter_frame = iter_frame->get_calling_frame ())
	if (iter_frame == enclosing_frame)
	  return true;
      return false;
    }

  return true;
}

/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_SVAL must be of pointer type.
   PTR_TREE if non-NULL can be used when emitting diagnostics.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt,
			    bool add_nonnull_constraint) const
{
  gcc_assert (ptr_sval);
  gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));

  /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
     as a constraint.  This suppresses false positives from
     -Wanalyzer-null-dereference for the case where we later have an
     if (PTR_SVAL) that would occur if we considered the false branch
     and transitioned the malloc state machine from start->null.  */
  if (add_nonnull_constraint)
    {
      tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
      const svalue *null_ptr
	= m_mgr->get_or_create_constant_svalue (null_ptr_cst);
      m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
    }

  switch (ptr_sval->get_kind ())
    {
    default:
      break;

    case SK_REGION:
      {
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    break;
	  }
      }
      break;

    case SK_POISONED:
      {
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (::make_unique<poisoned_value_diagnostic>
			      (ptr, pkind, nullptr, nullptr));
	      }
	  }
      }
      break;
    }

  return m_mgr->get_symbolic_region (ptr_sval);
}

/* Attempt to get BITS within any value of REG, as TYPE.
   In particular, extract values from compound_svalues for the case
   where there's a concrete binding at BITS.
   Return an unknown svalue if we can't handle the given case.
   Use CTXT to report any warnings associated with reading from REG.  */

const svalue *
region_model::get_rvalue_for_bits (tree type,
				   const region *reg,
				   const bit_range &bits,
				   region_model_context *ctxt) const
{
  const svalue *sval = get_store_value (reg, ctxt);
  return m_mgr->get_or_create_bits_within (type, bits, sval);
}

/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory.  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    auto_diagnostic_group d;
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
	warned = ctxt.warn ("write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = ctxt.warn ("write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = ctxt.warn ("write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here", m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;
  tree m_decl;
};

/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    return ctxt.warn ("write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;
};

/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (make_unique<write_to_const_diagnostic>
		      (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g. the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (make_unique<write_to_const_diagnostic> (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (make_unique<write_to_string_literal_diagnostic> (dest_reg));
      break;
    }
}
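
/* For example, the write in:

     char *s = (char *)"foo";
     s[0] = 'F';

   has a base region of kind RK_STRING, and so is reported via
   -Wanalyzer-write-to-string-literal above.  */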

/* Get the capacity of REG in bytes.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    case RK_STRING:
      {
	/* "Capacity" here means "size".  */
	const string_region *string_reg = as_a <const string_region *> (reg);
	tree string_cst = string_reg->get_string_cst ();
	return m_mgr->get_or_create_int_cst (size_type_node,
					     TREE_STRING_LENGTH (string_cst));
      }
      break;
    }

  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}

/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.
   Return TRUE if an OOB access was detected.
   If SVAL_HINT is non-NULL, use it as a hint in diagnostics
   about the value that would be written to REG.  */

bool
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   const svalue *sval_hint,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return false;

  bool oob_access_detected = false;
  check_region_for_taint (reg, dir, ctxt);
  if (!check_region_bounds (reg, dir, sval_hint, ctxt))
    oob_access_detected = true;

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
  return oob_access_detected;
}

/* If CTXT is non-NULL, use it to warn about any problems writing to REG.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      const svalue *sval_hint,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, sval_hint, ctxt);
}

/* If CTXT is non-NULL, use it to warn about any problems reading from REG.
   Returns TRUE if an OOB read was detected.  */

bool
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  return check_region_access (src_reg, DIR_READ, NULL, ctxt);
}

/* Concrete subclass for casts of pointers that lead to trailing bytes.  */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  dubious_allocation_size (const region *lhs, const region *rhs,
			   const svalue *capacity_sval, tree expr,
			   const gimple *stmt)
  : m_lhs (lhs), m_rhs (rhs),
    m_capacity_sval (capacity_sval), m_expr (expr),
    m_stmt (stmt),
    m_has_allocation_event (false)
  {
    gcc_assert (m_capacity_sval);
  }

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return (m_stmt == other.m_stmt
	    && pending_diagnostic::same_tree_p (m_expr, other.m_expr));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (diagnostic_emission_context &ctxt) final override
  {
    ctxt.add_cwe (131);

    return ctxt.warn ("allocated buffer size is not a multiple"
		      " of the pointee's size");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_has_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));
    /* Fallback: Typically, we should always see an allocation_event
       before.  */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
      }

    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  void
  add_region_creation_events (const region *,
			      tree capacity,
			      const event_loc_info &loc_info,
			      checker_path &emission_path) final override
  {
    emission_path.add_event
      (make_unique<region_creation_event_allocation_size> (capacity,
							   loc_info));

    m_has_allocation_event = true;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

  void maybe_add_sarif_properties (sarif_object &result_obj)
    const final override
  {
    sarif_property_bag &props = result_obj.get_or_create_properties ();
#define PROPERTY_PREFIX "gcc/analyzer/dubious_allocation_size/"
    props.set (PROPERTY_PREFIX "lhs", m_lhs->to_json ());
    props.set (PROPERTY_PREFIX "rhs", m_rhs->to_json ());
    props.set (PROPERTY_PREFIX "capacity_sval", m_capacity_sval->to_json ());
#undef PROPERTY_PREFIX
  }

private:
  const region *m_lhs;
  const region *m_rhs;
  const svalue *m_capacity_sval;
  const tree m_expr;
  const gimple *m_stmt;
  bool m_has_allocation_event;
};

/* Return true if a constant allocation size CST is compatible with a
   pointee of size POINTEE_SIZE_TREE bytes: for structs, zero or any size
   at least as large as the struct; otherwise, an exact multiple of the
   pointee size.  */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree,
			       bool is_struct)
{
  gcc_assert (TREE_CODE (cst) == INTEGER_CST);
  gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);

  unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
  unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);

  if (is_struct)
    return alloc_size == 0 || alloc_size >= pointee_size;
  return alloc_size % pointee_size == 0;
}

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}
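
/* For example (a sketch), with a 4-byte pointee such as int32_t: an
   allocation size of 12 is compatible (12 % 4 == 0), whereas 10 is not;
   with IS_STRUCT, an allocation of 10 bytes is accepted for an 8-byte
   struct, to allow for trailing bytes.  */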

/* Visitor for checking whether SVAL could be a non-multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that might not be a multiple of SIZE_CST to the RESULT_SET.  */

class size_visitor : public visitor
{
public:
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  bool is_dubious_capacity ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    if (CONVERT_EXPR_CODE_P (sval->get_op ())
        && result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    switch (sval->get_op ())
      {
      case MULT_EXPR:
        if (result_set.contains (arg0) && result_set.contains (arg1))
          result_set.add (sval);
        break;
      case PLUS_EXPR:
      case MINUS_EXPR:
        if (result_set.contains (arg0) || result_set.contains (arg1))
          result_set.add (sval);
        break;
      default:
        break;
      }
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) || result_set.contains (iter))
      result_set.add (sval);
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
        if (tree cst = id.get_obj (*m_cm).get_any_constant ())
          check_constant (cst, sval);
      }
    else if (!m_cm->sval_constrained_p (sval))
      {
        result_set.add (sval);
      }
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    equiv_class_id id = equiv_class_id::null ();
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      if (tree cst = id.get_obj (*m_cm).get_any_constant ())
        check_constant (cst, sval);
  }

private:
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
        /* Assume all unhandled operands are compatible.  */
        break;
      case INTEGER_CST:
        if (!capacity_compatible_with_type (cst, m_size_cst))
          result_set.add (sval);
        break;
      }
  }

  tree m_size_cst;
  const svalue *m_root_sval;
  constraint_manager *m_cm;
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};

/* Return true if SIZE_CST is a power of 2, and we have
   CAPACITY_SVAL == ((X | (SIZE_CST - 1)) + 1), since it is then a
   multiple of SIZE_CST, as used by the Linux kernel's round_up macro.  */

static bool
is_round_up (tree size_cst,
             const svalue *capacity_sval)
{
  if (!integer_pow2p (size_cst))
    return false;
  const binop_svalue *binop_sval = capacity_sval->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (binop_sval->get_op () != PLUS_EXPR)
    return false;
  tree rhs_cst = binop_sval->get_arg1 ()->maybe_get_constant ();
  if (!rhs_cst)
    return false;
  if (!integer_onep (rhs_cst))
    return false;

  /* We have CAPACITY_SVAL == (LHS + 1) for some LHS expression.  */

  const binop_svalue *lhs_binop_sval
    = binop_sval->get_arg0 ()->dyn_cast_binop_svalue ();
  if (!lhs_binop_sval)
    return false;
  if (lhs_binop_sval->get_op () != BIT_IOR_EXPR)
    return false;

  tree inner_rhs_cst = lhs_binop_sval->get_arg1 ()->maybe_get_constant ();
  if (!inner_rhs_cst)
    return false;

  if (wi::to_widest (inner_rhs_cst) + 1 != wi::to_widest (size_cst))
    return false;
  return true;
}

/* Return true if CAPACITY_SVAL is known to be a multiple of SIZE_CST.  */

static bool
is_multiple_p (tree size_cst,
               const svalue *capacity_sval)
{
  if (const svalue *sval = capacity_sval->maybe_undo_cast ())
    return is_multiple_p (size_cst, sval);

  if (is_round_up (size_cst, capacity_sval))
    return true;

  return false;
}

/* Return true if we should emit a dubious_allocation_size warning
   on assigning a region of capacity CAPACITY_SVAL bytes to a pointer
   of type with size SIZE_CST, where CM expresses known constraints.  */

static bool
is_dubious_capacity (tree size_cst,
                     const svalue *capacity_sval,
                     constraint_manager *cm)
{
  if (is_multiple_p (size_cst, capacity_sval))
    return false;
  size_visitor v (size_cst, capacity_sval, cm);
  return v.is_dubious_capacity ();
}


/* Return true if a struct or union either uses the inheritance pattern,
   where the first field is a base struct, or the flexible array member
   pattern, where the last field is an array without a specified size.  */

static bool
struct_or_union_with_inheritance_p (tree struc)
{
  tree iter = TYPE_FIELDS (struc);
  if (iter == NULL_TREE)
    return false;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
    return true;

  tree last_field;
  while (iter != NULL_TREE)
    {
      last_field = iter;
      iter = DECL_CHAIN (iter);
    }

  if (last_field != NULL_TREE
      && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
    return true;

  return false;
}

/* Return true if the lhs and rhs of an assignment have different types.  */

static bool
is_any_cast_p (const gimple *stmt)
{
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    return gimple_assign_cast_p (assign)
           || !pending_diagnostic::same_tree_p (
                  TREE_TYPE (gimple_assign_lhs (assign)),
                  TREE_TYPE (gimple_assign_rhs1 (assign)));
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
                                    TREE_TYPE (gimple_call_lhs (call)),
                                    gimple_call_return_type (call));
    }

  return false;
}

/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
                                 region_model_context *ctxt) const
{
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we cannot
     deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = deref_rvalue (rhs_sval, NULL_TREE, ctxt, false);
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
        const constant_svalue *cst_cap_sval
          = as_a <const constant_svalue *> (capacity);
        tree cst_cap = cst_cap_sval->get_constant ();
        if (TREE_CODE (cst_cap) == INTEGER_CST
            && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
                                               is_struct))
          ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg, rhs_reg,
                                                             capacity, cst_cap,
                                                             ctxt->get_stmt ()));
      }
      break;
    default:
      {
        if (!is_struct)
          {
            if (is_dubious_capacity (pointee_size_tree,
                                     capacity,
                                     m_constraints))
              {
                tree expr = get_representative_tree (capacity);
                ctxt->warn (make_unique <dubious_allocation_size> (lhs_reg,
                                                                   rhs_reg,
                                                                   capacity,
                                                                   expr,
                                                                   ctxt->get_stmt ()));
              }
          }
        break;
      }
    }
}

/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
                         region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Setting the value of an empty region is a no-op.  */
  if (lhs_reg->empty_p ())
    return;

  check_region_size (lhs_reg, rhs_sval, ctxt);

  check_region_for_write (lhs_reg, rhs_sval, ctxt);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
                     ctxt ? ctxt->get_uncertainty () : NULL);
}

/* Set the value of the region given by LHS to the value given by RHS.  */

void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}

/* Issue a note specifying that a particular function parameter is expected
   to be a valid null-terminated string.  */

static void
inform_about_expected_null_terminated_string_arg (const call_arg_details &ad)
{
  // TODO: ideally we'd underline the param here
  inform (DECL_SOURCE_LOCATION (ad.m_called_fndecl),
          "argument %d of %qD must be a pointer to a null-terminated string",
          ad.m_arg_idx + 1, ad.m_called_fndecl);
}

/* A binding of a specific svalue at a concrete byte range.  */

struct fragment
{
  fragment ()
  : m_byte_range (0, 0), m_sval (nullptr)
  {
  }

  fragment (const byte_range &bytes, const svalue *sval)
  : m_byte_range (bytes), m_sval (sval)
  {
  }

  static int cmp_ptrs (const void *p1, const void *p2)
  {
    const fragment *f1 = (const fragment *)p1;
    const fragment *f2 = (const fragment *)p2;
    return byte_range::cmp (f1->m_byte_range, f2->m_byte_range);
  }

  void
  dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "fragment(");
    m_byte_range.dump_to_pp (pp);
    pp_string (pp, ", sval: ");
    if (m_sval)
      m_sval->dump_to_pp (pp, true);
    else
      pp_string (pp, "nullptr");
    pp_string (pp, ")");
  }

  byte_range m_byte_range;
  const svalue *m_sval;
};

/* Determine if there is a zero terminator somewhere in the
   part of STRING_CST covered by BYTES (where BYTES is relative to the
   start of the constant).

   Return a tristate:
   - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
     the number of bytes that would be read, including the zero byte.
   - false if there definitely isn't a zero byte
   - unknown if we don't know.  */

static tristate
string_cst_has_null_terminator (tree string_cst,
                                const byte_range &bytes,
                                byte_offset_t *out_bytes_read)
{
  gcc_assert (bytes.m_start_byte_offset >= 0);

  /* If we're beyond the string_cst, reads are unsuccessful.  */
  if (tree cst_size = get_string_cst_size (string_cst))
    if (TREE_CODE (cst_size) == INTEGER_CST)
      if (bytes.m_start_byte_offset >= TREE_INT_CST_LOW (cst_size))
        return tristate::unknown ();

  /* Assume all bytes after TREE_STRING_LENGTH are zero.  This handles
     the case where an array is initialized with a string_cst that isn't
     as long as the array, where the remaining elements are
     empty-initialized and thus zeroed.  */
  if (bytes.m_start_byte_offset >= TREE_STRING_LENGTH (string_cst))
    {
      *out_bytes_read = 1;
      return tristate (true);
    }

  /* Look for the first 0 byte within STRING_CST
     from START_READ_OFFSET onwards.  */
  const byte_offset_t num_bytes_to_search
    = std::min<byte_offset_t> ((TREE_STRING_LENGTH (string_cst)
                                - bytes.m_start_byte_offset),
                               bytes.m_size_in_bytes);
  const char *start = (TREE_STRING_POINTER (string_cst)
                       + bytes.m_start_byte_offset.slow ());
  if (num_bytes_to_search >= 0)
    if (const void *p = memchr (start, 0, bytes.m_size_in_bytes.slow ()))
      {
        *out_bytes_read = (const char *)p - start + 1;
        return tristate (true);
      }

  *out_bytes_read = bytes.m_size_in_bytes;
  return tristate (false);
}

static tristate
svalue_byte_range_has_null_terminator (const svalue *sval,
                                       const byte_range &bytes,
                                       byte_offset_t *out_bytes_read,
                                       logger *logger);

/* Determine if there is a zero terminator somewhere in the
   part of SVAL covered by BYTES (where BYTES is relative to the svalue).

   Return a tristate:
   - true if there definitely is a zero byte, writing to *OUT_BYTES_READ
     the number of bytes that would be read, including the zero byte.
   - false if there definitely isn't a zero byte
   - unknown if we don't know.

   Use LOGGER (if non-null) for any logging.  */

static tristate
svalue_byte_range_has_null_terminator_1 (const svalue *sval,
                                         const byte_range &bytes,
                                         byte_offset_t *out_bytes_read,
                                         logger *logger)
{
  if (bytes.m_start_byte_offset == 0
      && sval->all_zeroes_p ())
    {
      /* The initial byte of an all-zeroes SVAL is a zero byte.  */
      *out_bytes_read = 1;
      return tristate (true);
    }

  switch (sval->get_kind ())
    {
    case SK_CONSTANT:
      {
        tree cst
          = as_a <const constant_svalue *> (sval)->get_constant ();
        switch (TREE_CODE (cst))
          {
          case STRING_CST:
            return string_cst_has_null_terminator (cst, bytes, out_bytes_read);
          case INTEGER_CST:
            if (bytes.m_start_byte_offset == 0
                && integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (cst))))
              {
                /* Model accesses to the initial byte of a 1-byte
                   INTEGER_CST.  */
                *out_bytes_read = 1;
                if (zerop (cst))
                  return tristate (true);
                else
                  return tristate (false);
              }
            /* Treat any other access to an INTEGER_CST as unknown.  */
            return tristate::TS_UNKNOWN;

          default:
            break;
          }
      }
      break;

    case SK_INITIAL:
      {
        const initial_svalue *initial_sval = (const initial_svalue *)sval;
        const region *reg = initial_sval->get_region ();
        if (const string_region *string_reg = reg->dyn_cast_string_region ())
          {
            tree string_cst = string_reg->get_string_cst ();
            return string_cst_has_null_terminator (string_cst,
                                                   bytes,
                                                   out_bytes_read);
          }
        return tristate::TS_UNKNOWN;
      }
      break;

    case SK_BITS_WITHIN:
      {
        const bits_within_svalue *bits_within_sval
          = (const bits_within_svalue *)sval;
        byte_range bytes_within_inner (0, 0);
        if (bits_within_sval->get_bits ().as_byte_range (&bytes_within_inner))
          {
            /* Consider e.g. looking for null terminator of
               bytes 2-4 of BITS_WITHIN(bytes 10-15 of inner_sval)

               This is equivalent to looking within bytes 12-14 of
               inner_sval.  */
            const byte_offset_t start_byte_relative_to_inner
              = (bytes.m_start_byte_offset
                 + bytes_within_inner.m_start_byte_offset);
            const byte_offset_t next_byte_relative_to_inner
              = (bytes.get_next_byte_offset ()
                 + bytes_within_inner.m_start_byte_offset);
            if (next_byte_relative_to_inner > start_byte_relative_to_inner)
              {
                const byte_range relative_to_inner
                  (start_byte_relative_to_inner,
                   next_byte_relative_to_inner - start_byte_relative_to_inner);
                const svalue *inner_sval
                  = bits_within_sval->get_inner_svalue ();
                return svalue_byte_range_has_null_terminator (inner_sval,
                                                              relative_to_inner,
                                                              out_bytes_read,
                                                              logger);
              }
          }
      }
      break;

    default:
      // TODO: it may be possible to handle other cases here.
      break;
    }
  return tristate::TS_UNKNOWN;
}

/* Like svalue_byte_range_has_null_terminator_1, but add logging.  */

static tristate
svalue_byte_range_has_null_terminator (const svalue *sval,
                                       const byte_range &bytes,
                                       byte_offset_t *out_bytes_read,
                                       logger *logger)
{
  LOG_SCOPE (logger);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      bytes.dump_to_pp (pp);
      logger->log_partial (" of sval: ");
      sval->dump_to_pp (pp, true);
      logger->end_log_line ();
    }
  tristate ts
    = svalue_byte_range_has_null_terminator_1 (sval, bytes,
                                               out_bytes_read, logger);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      pp_printf (pp, "has null terminator: %s", ts.as_string ());
      if (ts.is_true ())
        {
          pp_string (pp, "; bytes read: ");
          pp_wide_int (pp, *out_bytes_read, SIGNED);
        }
      logger->end_log_line ();
    }
  return ts;
}

/* A frozen copy of a single base region's binding_cluster within a store,
   optimized for traversal of the concrete parts in byte order.
   This only captures concrete bindings, and is an implementation detail
   of region_model::scan_for_null_terminator.  */

class iterable_cluster
{
public:
  iterable_cluster (const binding_cluster *cluster)
  {
    if (!cluster)
      return;
    for (auto iter : *cluster)
      {
        const binding_key *key = iter.first;
        const svalue *sval = iter.second;

        if (const concrete_binding *concrete_key
              = key->dyn_cast_concrete_binding ())
          {
            byte_range fragment_bytes (0, 0);
            if (concrete_key->get_byte_range (&fragment_bytes))
              m_fragments.safe_push (fragment (fragment_bytes, sval));
          }
        else
          m_symbolic_bindings.safe_push (key);
      }
    m_fragments.qsort (fragment::cmp_ptrs);
  }

  bool
  get_fragment_for_byte (byte_offset_t byte, fragment *out_frag) const
  {
    /* TODO: binary search rather than linear.  */
    unsigned iter_idx;
    for (iter_idx = 0; iter_idx < m_fragments.length (); iter_idx++)
      {
        if (m_fragments[iter_idx].m_byte_range.contains_p (byte))
          {
            *out_frag = m_fragments[iter_idx];
            return true;
          }
      }
    return false;
  }

  bool has_symbolic_bindings_p () const
  {
    return !m_symbolic_bindings.is_empty ();
  }

  void dump_to_pp (pretty_printer *pp) const
  {
    pp_string (pp, "iterable_cluster (fragments: [");
    for (auto const &iter : &m_fragments)
      {
        if (&iter != m_fragments.begin ())
          pp_string (pp, ", ");
        iter.dump_to_pp (pp);
      }
    pp_printf (pp, "], symbolic bindings: [");
    for (auto const &iter : m_symbolic_bindings)
      {
        if (&iter != m_symbolic_bindings.begin ())
          pp_string (pp, ", ");
        (*iter).dump_to_pp (pp, true);
      }
    pp_string (pp, "])");
  }

private:
  auto_vec<fragment> m_fragments;
  auto_vec<const binding_key *> m_symbolic_bindings;
};

/* Simulate reading the bytes at BYTES from BASE_REG.
   Complain to CTXT about any issues with the read, e.g. out-of-bounds.  */

const svalue *
region_model::get_store_bytes (const region *base_reg,
                               const byte_range &bytes,
                               region_model_context *ctxt) const
{
  /* Shortcut reading all of a string_region.  */
  if (bytes.get_start_byte_offset () == 0)
    if (const string_region *string_reg = base_reg->dyn_cast_string_region ())
      if (bytes.m_size_in_bytes
            == TREE_STRING_LENGTH (string_reg->get_string_cst ()))
        return m_mgr->get_or_create_initial_value (base_reg);

  const svalue *index_sval
    = m_mgr->get_or_create_int_cst (size_type_node,
                                    bytes.get_start_byte_offset ());
  const region *offset_reg = m_mgr->get_offset_region (base_reg,
                                                       NULL_TREE,
                                                       index_sval);
  const svalue *byte_size_sval
    = m_mgr->get_or_create_int_cst (size_type_node, bytes.m_size_in_bytes);
  const region *read_reg = m_mgr->get_sized_region (offset_reg,
                                                    NULL_TREE,
                                                    byte_size_sval);

  /* Simulate reading those bytes from the store.  */
  const svalue *sval = get_store_value (read_reg, ctxt);
  return sval;
}

/* Build a MEM_REF tree (of char type) for the byte at offset BYTE_OFFSET
   within PTR_EXPR, for use in diagnostics.  */

static tree
get_tree_for_byte_offset (tree ptr_expr, byte_offset_t byte_offset)
{
  gcc_assert (ptr_expr);
  tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
  return fold_build2 (MEM_REF,
                      char_type_node,
                      ptr_expr, wide_int_to_tree (ptype, byte_offset));
}

/* Simulate a series of reads of REG until we find a 0 byte
   (equivalent to calling strlen).

   Complain to CTXT and return NULL if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue for the number of bytes read (strlen + 1),
   and, if OUT_SVAL is non-NULL, write to *OUT_SVAL with an svalue
   representing the content of REG up to and including the terminator.

   Algorithm
   =========

   Get offset for first byte to read.
   Find the binding (if any) that contains it.
   Find the size in bits of that binding.
   Round to the nearest byte (which way???)
     Or maybe give up if we have a partial binding there.
   Get the svalue from the binding.
   Determine the strlen (if any) of that svalue.
   Does it have a 0-terminator within it?
     If so, we have a partial read up to and including that terminator:
       Read those bytes from the store; add to the result in the correct place.
       Finish.
     If not, we have a full read of that svalue:
       Read those bytes from the store; add to the result in the correct place.
       Update read/write offsets.
       Continue.
     If unknown:
       Result is unknown.
       Finish.  */

const svalue *
region_model::scan_for_null_terminator_1 (const region *reg,
                                          tree expr,
                                          const svalue **out_sval,
                                          region_model_context *ctxt) const
{
  logger *logger = ctxt ? ctxt->get_logger () : nullptr;
  store_manager *store_mgr = m_mgr->get_store_manager ();

  region_offset offset = reg->get_offset (m_mgr);
  if (offset.symbolic_p ())
    {
      if (out_sval)
        *out_sval = get_store_value (reg, nullptr);
      if (logger)
        logger->log ("offset is symbolic");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  byte_offset_t src_byte_offset;
  if (!offset.get_concrete_byte_offset (&src_byte_offset))
    {
      if (out_sval)
        *out_sval = get_store_value (reg, nullptr);
      if (logger)
        logger->log ("can't get concrete byte offset");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  const byte_offset_t initial_src_byte_offset = src_byte_offset;
  byte_offset_t dst_byte_offset = 0;

  const region *base_reg = reg->get_base_region ();

  if (const string_region *str_reg = base_reg->dyn_cast_string_region ())
    {
      tree string_cst = str_reg->get_string_cst ();
      if (const void *p = memchr (TREE_STRING_POINTER (string_cst),
                                  0,
                                  TREE_STRING_LENGTH (string_cst)))
        {
          size_t num_bytes_read
            = (const char *)p - TREE_STRING_POINTER (string_cst) + 1;
          /* Simulate the read.  */
          byte_range bytes_to_read (0, num_bytes_read);
          const svalue *sval = get_store_bytes (reg, bytes_to_read, ctxt);
          if (out_sval)
            *out_sval = sval;
          if (logger)
            logger->log ("using string_cst");
          return m_mgr->get_or_create_int_cst (size_type_node,
                                               num_bytes_read);
        }
    }

  const binding_cluster *cluster = m_store.get_cluster (base_reg);
  iterable_cluster c (cluster);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      c.dump_to_pp (pp);
      logger->end_log_line ();
    }

  binding_map result;

  while (1)
    {
      fragment f;
      if (c.get_fragment_for_byte (src_byte_offset, &f))
        {
          if (logger)
            {
              logger->start_log_line ();
              pretty_printer *pp = logger->get_printer ();
              pp_printf (pp, "src_byte_offset: ");
              pp_wide_int (pp, src_byte_offset, SIGNED);
              pp_string (pp, ": ");
              f.dump_to_pp (pp);
              logger->end_log_line ();
            }
          gcc_assert (f.m_byte_range.contains_p (src_byte_offset));
          /* src_byte_offset and f.m_byte_range are both expressed relative to
             the base region.
             Convert to a byte_range relative to the svalue.  */
          const byte_range bytes_relative_to_svalue
            (src_byte_offset - f.m_byte_range.get_start_byte_offset (),
             f.m_byte_range.get_next_byte_offset () - src_byte_offset);
          byte_offset_t fragment_bytes_read;
          tristate is_terminated
            = svalue_byte_range_has_null_terminator (f.m_sval,
                                                     bytes_relative_to_svalue,
                                                     &fragment_bytes_read,
                                                     logger);
          if (is_terminated.is_unknown ())
            {
              if (out_sval)
                *out_sval = get_store_value (reg, nullptr);
              return m_mgr->get_or_create_unknown_svalue (size_type_node);
            }

          /* Simulate reading those bytes from the store.  */
          byte_range bytes_to_read (src_byte_offset, fragment_bytes_read);
          const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
          check_for_poison (sval, expr, nullptr, ctxt);

          if (out_sval)
            {
              byte_range bytes_to_write (dst_byte_offset, fragment_bytes_read);
              const binding_key *key
                = store_mgr->get_concrete_binding (bytes_to_write);
              result.put (key, sval);
            }

          src_byte_offset += fragment_bytes_read;
          dst_byte_offset += fragment_bytes_read;

          if (is_terminated.is_true ())
            {
              if (out_sval)
                *out_sval = m_mgr->get_or_create_compound_svalue (NULL_TREE,
                                                                  result);
              if (logger)
                logger->log ("got terminator");
              return m_mgr->get_or_create_int_cst (size_type_node,
                                                   dst_byte_offset);
            }
        }
      else
        break;
    }

  /* No binding for this base_region, or no binding at src_byte_offset
     (or a symbolic binding).  */

  if (c.has_symbolic_bindings_p ())
    {
      if (out_sval)
        *out_sval = get_store_value (reg, nullptr);
      if (logger)
        logger->log ("got symbolic binding");
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }

  /* TODO: the various special-cases seen in
     region_model::get_store_value.  */

  /* Simulate reading from this byte, then give up.  */
  byte_range bytes_to_read (src_byte_offset, 1);
  const svalue *sval = get_store_bytes (base_reg, bytes_to_read, ctxt);
  tree byte_expr
    = (expr
       ? get_tree_for_byte_offset (expr,
                                   src_byte_offset - initial_src_byte_offset)
       : NULL_TREE);
  check_for_poison (sval, byte_expr, nullptr, ctxt);
  if (base_reg->can_have_initial_svalue_p ())
    {
      if (out_sval)
        *out_sval = get_store_value (reg, nullptr);
      return m_mgr->get_or_create_unknown_svalue (size_type_node);
    }
  else
    return nullptr;
}

/* Like region_model::scan_for_null_terminator_1, but add logging.  */

const svalue *
region_model::scan_for_null_terminator (const region *reg,
                                        tree expr,
                                        const svalue **out_sval,
                                        region_model_context *ctxt) const
{
  logger *logger = ctxt ? ctxt->get_logger () : nullptr;
  LOG_SCOPE (logger);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      logger->log_partial ("region: ");
      reg->dump_to_pp (pp, true);
      logger->end_log_line ();
    }
  const svalue *sval = scan_for_null_terminator_1 (reg, expr, out_sval, ctxt);
  if (logger)
    {
      pretty_printer *pp = logger->get_printer ();
      logger->start_log_line ();
      logger->log_partial ("length result: ");
      if (sval)
        sval->dump_to_pp (pp, true);
      else
        pp_printf (pp, "NULL");
      logger->end_log_line ();
      if (out_sval)
        {
          logger->start_log_line ();
          logger->log_partial ("content result: ");
          if (*out_sval)
            (*out_sval)->dump_to_pp (pp, true);
          else
            pp_printf (pp, "NULL");
          logger->end_log_line ();
        }
    }
  return sval;
}

/* Check that argument ARG_IDX (0-based) to the call described by CD
   is a pointer to a valid null-terminated string.

   Simulate scanning through the buffer, reading until we find a 0 byte
   (equivalent to calling strlen).

   Complain and return NULL if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue for strlen of the buffer (*not* including
   the null terminator).

   TODO: we should also complain if:
   - the pointer is NULL (or could be).  */

const svalue *
region_model::check_for_null_terminated_string_arg (const call_details &cd,
                                                    unsigned arg_idx) const
{
  return check_for_null_terminated_string_arg (cd,
                                               arg_idx,
                                               false, /* include_terminator */
                                               nullptr); // out_sval
}


/* Check that argument ARG_IDX (0-based) to the call described by CD
   is a pointer to a valid null-terminated string.

   Simulate scanning through the buffer, reading until we find a 0 byte
   (equivalent to calling strlen).

   Complain and return NULL if:
   - the buffer pointed to isn't null-terminated
   - the buffer pointed to has any uninitialized bytes before any 0-terminator
   - any of the reads aren't within the bounds of the underlying base region

   Otherwise, return a svalue.  This will be the number of bytes read
   (including the null terminator) if INCLUDE_TERMINATOR is true, or strlen
   of the buffer (not including the null terminator) if it is false.

   Also, when returning an svalue, if OUT_SVAL is non-NULL, write to
   *OUT_SVAL with an svalue representing the content of the buffer up to
   and including the terminator.

   TODO: we should also complain if:
   - the pointer is NULL (or could be).  */

const svalue *
region_model::check_for_null_terminated_string_arg (const call_details &cd,
                                                    unsigned arg_idx,
                                                    bool include_terminator,
                                                    const svalue **out_sval) const
{
  class null_terminator_check_event : public custom_event
  {
  public:
    null_terminator_check_event (const event_loc_info &loc_info,
                                 const call_arg_details &arg_details)
    : custom_event (loc_info),
      m_arg_details (arg_details)
    {
    }

    label_text get_desc (bool can_colorize) const final override
    {
      if (m_arg_details.m_arg_expr)
        return make_label_text (can_colorize,
                                "while looking for null terminator"
                                " for argument %i (%qE) of %qD...",
                                m_arg_details.m_arg_idx + 1,
                                m_arg_details.m_arg_expr,
                                m_arg_details.m_called_fndecl);
      else
        return make_label_text (can_colorize,
                                "while looking for null terminator"
                                " for argument %i of %qD...",
                                m_arg_details.m_arg_idx + 1,
                                m_arg_details.m_called_fndecl);
    }

  private:
    const call_arg_details m_arg_details;
  };

  class null_terminator_check_decl_note
    : public pending_note_subclass<null_terminator_check_decl_note>
  {
  public:
    null_terminator_check_decl_note (const call_arg_details &arg_details)
    : m_arg_details (arg_details)
    {
    }

    const char *get_kind () const final override
    {
      return "null_terminator_check_decl_note";
    }

    void emit () const final override
    {
      inform_about_expected_null_terminated_string_arg (m_arg_details);
    }

    bool operator== (const null_terminator_check_decl_note &other) const
    {
      return m_arg_details == other.m_arg_details;
    }

  private:
    const call_arg_details m_arg_details;
  };

  /* Subclass of decorated_region_model_context that
     adds the above event and note to any saved diagnostics.  */
  class annotating_ctxt : public annotating_context
  {
  public:
    annotating_ctxt (const call_details &cd,
                     unsigned arg_idx)
    : annotating_context (cd.get_ctxt ()),
      m_cd (cd),
      m_arg_idx (arg_idx)
    {
    }
    void add_annotations () final override
    {
      call_arg_details arg_details (m_cd, m_arg_idx);
      event_loc_info loc_info (m_cd.get_location (),
                               m_cd.get_model ()->get_current_function ()->decl,
                               m_cd.get_model ()->get_stack_depth ());

      add_event (make_unique<null_terminator_check_event> (loc_info,
                                                           arg_details));
      add_note (make_unique <null_terminator_check_decl_note> (arg_details));
    }
  private:
    const call_details &m_cd;
    unsigned m_arg_idx;
  };

  /* Use this ctxt below so that any diagnostics that get added
     get annotated.  */
  annotating_ctxt my_ctxt (cd, arg_idx);

  const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
  const region *buf_reg
    = deref_rvalue (arg_sval, cd.get_arg_tree (arg_idx), &my_ctxt);

  if (const svalue *num_bytes_read_sval
        = scan_for_null_terminator (buf_reg,
                                    cd.get_arg_tree (arg_idx),
                                    out_sval,
                                    &my_ctxt))
    {
      if (include_terminator)
        return num_bytes_read_sval;
      else
        {
          /* strlen is (bytes_read - 1).  */
          const svalue *one = m_mgr->get_or_create_int_cst (size_type_node, 1);
          return m_mgr->get_or_create_binop (size_type_node,
                                             MINUS_EXPR,
                                             num_bytes_read_sval,
                                             one);
        }
    }
  else
    return nullptr;
}

/* Remove all bindings overlapping REG within the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}

/* Remove any bindings for REG within the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}

/* Fill REG with SVAL.
   Use CTXT to report any warnings associated with the write
   (e.g. out-of-bounds).  */

void
region_model::fill_region (const region *reg,
                           const svalue *sval,
                           region_model_context *ctxt)
{
  check_region_for_write (reg, nullptr, ctxt);
  m_store.fill_region (m_mgr->get_store_manager(), reg, sval);
}

/* Zero-fill REG.
   Use CTXT to report any warnings associated with the write
   (e.g. out-of-bounds).  */

void
region_model::zero_fill_region (const region *reg,
                                region_model_context *ctxt)
{
  check_region_for_write (reg, nullptr, ctxt);
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}

/* Copy NUM_BYTES_SVAL bytes of SVAL to DEST_REG.
   Use CTXT to report any warnings associated with the copy
   (e.g. out-of-bounds writes).  */

void
region_model::write_bytes (const region *dest_reg,
                           const svalue *num_bytes_sval,
                           const svalue *sval,
                           region_model_context *ctxt)
{
  const region *sized_dest_reg
    = m_mgr->get_sized_region (dest_reg, NULL_TREE, num_bytes_sval);
  set_value (sized_dest_reg, sval, ctxt);
}

/* Read NUM_BYTES_SVAL bytes from SRC_REG.
   Use CTXT to report any warnings associated with the read
   (e.g. out-of-bounds reads, copying of uninitialized values, etc).  */

const svalue *
region_model::read_bytes (const region *src_reg,
                          tree src_ptr_expr,
                          const svalue *num_bytes_sval,
                          region_model_context *ctxt) const
{
  if (num_bytes_sval->get_kind () == SK_UNKNOWN)
    return m_mgr->get_or_create_unknown_svalue (NULL_TREE);
  const region *sized_src_reg
    = m_mgr->get_sized_region (src_reg, NULL_TREE, num_bytes_sval);
  const svalue *src_contents_sval = get_store_value (sized_src_reg, ctxt);
  check_for_poison (src_contents_sval, src_ptr_expr,
                    sized_src_reg, ctxt);
  return src_contents_sval;
}

/* Copy NUM_BYTES_SVAL bytes from SRC_REG to DEST_REG.
   Use CTXT to report any warnings associated with the copy
   (e.g. out-of-bounds reads/writes, copying of uninitialized values,
   etc).  */

void
region_model::copy_bytes (const region *dest_reg,
                          const region *src_reg,
                          tree src_ptr_expr,
                          const svalue *num_bytes_sval,
                          region_model_context *ctxt)
{
  const svalue *data_sval
    = read_bytes (src_reg, src_ptr_expr, num_bytes_sval, ctxt);
  write_bytes (dest_reg, num_bytes_sval, data_sval, ctxt);
}

/* Mark REG as having unknown content.  */

void
region_model::mark_region_as_unknown (const region *reg,
                                      uncertainty_t *uncertainty)
{
  svalue_set maybe_live_values;
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg,
                                  uncertainty, &maybe_live_values);
  m_store.on_maybe_live_values (maybe_live_values);
}

/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.  */

tristate
region_model::eval_condition (const svalue *lhs,
                              enum tree_code op,
                              const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* See what we know based on the values.  */

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
         (apart from NaN-handling).
         TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
          || lhs->get_kind () == SK_UNKNOWN)
        return tristate::TS_UNKNOWN;

      switch (op)
        {
        case EQ_EXPR:
        case GE_EXPR:
        case LE_EXPR:
          return tristate::TS_TRUE;

        case NE_EXPR:
        case GT_EXPR:
        case LT_EXPR:
          return tristate::TS_FALSE;

        default:
          /* For other ops, use the logic below.  */
          break;
        }
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
        tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
        if (res.is_known ())
          return res;
        /* Otherwise, only known through constraints.  */
      }

  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    {
      /* If we have a pair of constants, compare them.  */
      if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
        return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
      else
        {
          /* When we have one constant, put it on the RHS.  */
          std::swap (lhs, rhs);
          op = swap_tree_comparison (op);
        }
    }
  gcc_assert (lhs->get_kind () != SK_CONSTANT);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
        if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
          {
            /* A region_svalue is a non-NULL pointer, except in certain
               special cases (see the comment for region::non_null_p).  */
            const region *pointee = ptr->get_pointee ();
            if (pointee->non_null_p ())
              {
                switch (op)
                  {
                  default:
                    gcc_unreachable ();

                  case EQ_EXPR:
                  case GE_EXPR:
                  case LE_EXPR:
                    return tristate::TS_FALSE;

                  case NE_EXPR:
                  case GT_EXPR:
                  case LT_EXPR:
                    return tristate::TS_TRUE;
                  }
              }
          }
        else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
          {
            /* Treat offsets from a non-NULL pointer as being non-NULL.  This
               isn't strictly true, in that eventually ptr++ will wrap
               around and be NULL, but it won't occur in practice and thus
               can be used to suppress false positives that we
               shouldn't warn for.  */
            if (binop->get_op () == POINTER_PLUS_EXPR)
              {
                tristate lhs_ts = eval_condition (binop->get_arg0 (), op, rhs);
                if (lhs_ts.is_known ())
                  return lhs_ts;
              }
          }
        else if (const unaryop_svalue *unaryop
                   = lhs->dyn_cast_unaryop_svalue ())
          {
            if (unaryop->get_op () == NEGATE_EXPR)
              {
                /* e.g. "-X <= 0" is equivalent to "X >= 0".  */
                tristate lhs_ts = eval_condition (unaryop->get_arg (),
                                                  swap_tree_comparison (op),
                                                  rhs);
                if (lhs_ts.is_known ())
                  return lhs_ts;
              }
          }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
        tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
        if (res.is_known ())
          return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
        tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
        if (res.is_known ())
          return res;
      }

  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
        tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
        if (res.is_known ())
          return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
        {
        default:
          break;
        case EQ_EXPR:
          {
            /* TODO: binops can be equal even if they are not structurally
               equal in case of commutative operators.  */
            tristate res = structural_equality (lhs, rhs);
            if (res.is_true ())
              return res;
          }
          break;
        case LE_EXPR:
          {
            tristate res = structural_equality (lhs, rhs);
            if (res.is_true ())
              return res;
          }
          break;
        case GE_EXPR:
          {
            tristate res = structural_equality (lhs, rhs);
            if (res.is_true ())
              return res;
            res = symbolic_greater_than (binop, rhs);
            if (res.is_true ())
              return res;
          }
          break;
        case GT_EXPR:
          {
            tristate res = symbolic_greater_than (binop, rhs);
            if (res.is_true ())
              return res;
          }
          break;
        }
    }

  /* Attempt to unwrap cast if there is one, and the types match.  */
  tree lhs_type = lhs->get_type ();
  tree rhs_type = rhs->get_type ();
  if (lhs_type && rhs_type)
    {
      const unaryop_svalue *lhs_un_op = dyn_cast <const unaryop_svalue *> (lhs);
      const unaryop_svalue *rhs_un_op = dyn_cast <const unaryop_svalue *> (rhs);
      if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
          && rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
          && lhs_type == rhs_type)
        {
          tristate res = eval_condition (lhs_un_op->get_arg (),
                                         op,
                                         rhs_un_op->get_arg ());
          if (res.is_known ())
            return res;
        }
      else if (lhs_un_op && CONVERT_EXPR_CODE_P (lhs_un_op->get_op ())
               && lhs_type == rhs_type)
        {
          tristate res = eval_condition (lhs_un_op->get_arg (), op, rhs);
          if (res.is_known ())
            return res;
        }
      else if (rhs_un_op && CONVERT_EXPR_CODE_P (rhs_un_op->get_op ())
               && lhs_type == rhs_type)
        {
          tristate res = eval_condition (lhs, op, rhs_un_op->get_arg ());
          if (res.is_known ())
            return res;
        }
    }

  /* Otherwise, try constraints.
     Cast to const to ensure we don't change the constraint_manager as we
     do this (e.g. by creating equivalence classes).  */
  const constraint_manager *constraints = m_constraints;
  return constraints->eval_condition (lhs, op, rhs);
}

/* Subroutine of region_model::eval_condition, for rejecting
   equality of INIT_VAL(PARM) with &LOCAL.  */

tristate
region_model::compare_initial_and_pointer (const initial_svalue *init,
                                           const region_svalue *ptr) const
{
  const region *pointee = ptr->get_pointee ();

  /* If we have a pointer to something within a stack frame, it can't be the
     initial value of a param.  */
  if (pointee->maybe_get_frame_region ())
    if (init->initial_value_of_param_p ())
      return tristate::TS_FALSE;

  return tristate::TS_UNKNOWN;
}

/* Return true if SVAL is definitely positive.  */

static bool
is_positive_svalue (const svalue *sval)
{
  if (tree cst = sval->maybe_get_constant ())
    return !zerop (cst) && get_range_pos_neg (cst) == 1;
  tree type = sval->get_type ();
  if (!type)
    return false;
  /* Consider a binary operation size_t + int.  The analyzer wraps the int
     in an unaryop_svalue, converting it to a size_t, but in the dynamic
     execution the result is smaller than the first operand.  Thus, we also
     have to check whether the argument of the unaryop_svalue is positive.  */
  if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
    return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
           && is_positive_svalue (un_op->get_arg ());
  return TYPE_UNSIGNED (type);
}

/* Return true if A is definitely larger than B.

   Limitation: does not account for integer overflows and does not try to
   return false, so it cannot be used negated.  */

tristate
region_model::symbolic_greater_than (const binop_svalue *bin_a,
                                     const svalue *b) const
{
  if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
    {
      /* Eliminate the right-hand side of both svalues.  */
      if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
        if (bin_a->get_op () == bin_b->get_op ()
            && eval_condition (bin_a->get_arg1 (),
                               GT_EXPR,
                               bin_b->get_arg1 ()).is_true ()
            && eval_condition (bin_a->get_arg0 (),
                               GE_EXPR,
                               bin_b->get_arg0 ()).is_true ())
          return tristate (tristate::TS_TRUE);

      /* Otherwise, try to remove a positive offset or factor from BIN_A.  */
      if (is_positive_svalue (bin_a->get_arg1 ())
          && eval_condition (bin_a->get_arg0 (),
                             GE_EXPR, b).is_true ())
        return tristate (tristate::TS_TRUE);
    }
  return tristate::unknown ();
}
4805
4806/* Return true if A and B are equal structurally.
4807
4808 Structural equality means that A and B are equal if the svalues A and B have
4809 the same nodes at the same positions in the tree and the leafs are equal.
4810 Equality for conjured_svalues and initial_svalues is determined by comparing
4811 the pointers while constants are compared by value. That behavior is useful
4812 to check for binaryop_svlaues that evaluate to the same concrete value but
4813 might use one operand with a different type but the same constant value.
4814
4815 For example,
4816 binop_svalue (mult_expr,
4817 initial_svalue (‘size_t’, decl_region (..., 'some_var')),
4818 constant_svalue (‘size_t’, 4))
4819 and
4820 binop_svalue (mult_expr,
4821 initial_svalue (‘size_t’, decl_region (..., 'some_var'),
4822 constant_svalue (‘sizetype’, 4))
4823 are structurally equal. A concrete C code example, where this occurs, can
4824 be found in test7 of out-of-bounds-5.c. */
4825
4826tristate
4827region_model::structural_equality (const svalue *a, const svalue *b) const
4828{
4829 /* If A and B are referentially equal, they are also structurally equal. */
4830 if (a == b)
4831 return tristate (tristate::TS_TRUE);
4832
4833 switch (a->get_kind ())
4834 {
4835 default:
4836 return tristate::unknown ();
4837 /* SK_CONJURED and SK_INITIAL are already handled
4838 by the referential equality above. */
4839 case SK_CONSTANT:
4840 {
4841 tree a_cst = a->maybe_get_constant ();
4842 tree b_cst = b->maybe_get_constant ();
4843 if (a_cst && b_cst)
4844 return tristate (tree_int_cst_equal (a_cst, b_cst));
4845 }
4846 return tristate (tristate::TS_FALSE);
4847 case SK_UNARYOP:
4848 {
4849 const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
4850 if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
4851 return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
4852 un_b->get_type ())
4853 && un_a->get_op () == un_b->get_op ()
4854 && structural_equality (un_a->get_arg (),
4855 un_b->get_arg ()));
4856 }
4857 return tristate (tristate::TS_FALSE);
4858 case SK_BINOP:
4859 {
4860 const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
4861 if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
4862 return tristate (bin_a->get_op () == bin_b->get_op ()
4863 && structural_equality (bin_a->get_arg0 (),
4864 bin_b->get_arg0 ())
4865 && structural_equality (bin_a->get_arg1 (),
4866 bin_b->get_arg1 ()));
4867 }
4868 return tristate (tristate::TS_FALSE);
4869 }
4870}
4871
48e8a7a6
DM
4872/* Handle various constraints of the form:
4873 LHS: ((bool)INNER_LHS INNER_OP INNER_RHS))
4874 OP : == or !=
4875 RHS: zero
4876 and (with a cast):
4877 LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
4878 OP : == or !=
4879 RHS: zero
4880 by adding constraints for INNER_LHS INNEROP INNER_RHS.
4881
4882 Return true if this function can fully handle the constraint; if
4883 so, add the implied constraint(s) and write true to *OUT if they
4884 are consistent with existing constraints, or write false to *OUT
4885 if they contradicts existing constraints.
4886
4887 Return false for cases that this function doeesn't know how to handle.
4888
4889 For example, if we're checking a stored conditional, we'll have
4890 something like:
4891 LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
4892 OP : NE_EXPR
4893 RHS: zero
4894 which this function can turn into an add_constraint of:
4895 (&HEAP_ALLOCATED_REGION(8) != (int *)0B)
4896
4897 Similarly, optimized && and || conditionals lead to e.g.
4898 if (p && q)
4899 becoming gimple like this:
4900 _1 = p_6 == 0B;
4901 _2 = q_8 == 0B
4902 _3 = _1 | _2
4903 On the "_3 is false" branch we can have constraints of the form:
4904 ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4905 | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
4906 == 0
4907 which implies that both _1 and _2 are false,
4908 which this function can turn into a pair of add_constraints of
4909 (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
4910 and:
4911 (&HEAP_ALLOCATED_REGION(10)!=(int *)0B). */
4912
4913bool
4914region_model::add_constraints_from_binop (const svalue *outer_lhs,
4915 enum tree_code outer_op,
4916 const svalue *outer_rhs,
4917 bool *out,
4918 region_model_context *ctxt)
4919{
4920 while (const svalue *cast = outer_lhs->maybe_undo_cast ())
4921 outer_lhs = cast;
4922 const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
4923 if (!binop_sval)
4924 return false;
4925 if (!outer_rhs->all_zeroes_p ())
4926 return false;
4927
4928 const svalue *inner_lhs = binop_sval->get_arg0 ();
4929 enum tree_code inner_op = binop_sval->get_op ();
4930 const svalue *inner_rhs = binop_sval->get_arg1 ();
4931
4932 if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
4933 return false;
4934
4935 /* We have either
4936 - "OUTER_LHS != false" (i.e. OUTER is true), or
4937 - "OUTER_LHS == false" (i.e. OUTER is false). */
4938 bool is_true = outer_op == NE_EXPR;
4939
4940 switch (inner_op)
4941 {
4942 default:
4943 return false;
4944
4945 case EQ_EXPR:
4946 case NE_EXPR:
50b5199c 4947 case GE_EXPR:
4948 case GT_EXPR:
4949 case LE_EXPR:
4950 case LT_EXPR:
48e8a7a6
DM
4951 {
4952 /* ...and "(inner_lhs OP inner_rhs) == 0"
4953 then (inner_lhs OP inner_rhs) must have the same
4954 logical value as LHS. */
4955 if (!is_true)
4956 inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
4957 *out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
4958 return true;
4959 }
4960 break;
4961
4962 case BIT_AND_EXPR:
4963 if (is_true)
4964 {
4965 /* ...and "(inner_lhs & inner_rhs) != 0"
4966 then both inner_lhs and inner_rhs must be true. */
4967 const svalue *false_sval
4968 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4969 bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
4970 bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
4971 *out = sat1 && sat2;
4972 return true;
4973 }
4974 return false;
4975
4976 case BIT_IOR_EXPR:
4977 if (!is_true)
4978 {
4979 /* ...and "(inner_lhs | inner_rhs) == 0"
4980 i.e. "(inner_lhs | inner_rhs)" is false
4981 then both inner_lhs and inner_rhs must be false. */
4982 const svalue *false_sval
4983 = m_mgr->get_or_create_constant_svalue (boolean_false_node);
4984 bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
4985 bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
4986 *out = sat1 && sat2;
4987 return true;
4988 }
4989 return false;
4990 }
4991}
4992
757bf1df
DM
4993/* Attempt to add the constraint "LHS OP RHS" to this region_model.
4994 If it is consistent with existing constraints, add it, and return true.
4995 Return false if it contradicts existing constraints.
4996 Use CTXT for reporting any diagnostics associated with the accesses. */
4997
4998bool
4999region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5000 region_model_context *ctxt)
5001{
e978955d
DM
5002 /* For now, make no attempt to capture constraints on floating-point
5003 values. */
5004 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5005 return true;
5006
808f4dfe
DM
5007 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
5008 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 5009
48e8a7a6
DM
5010 return add_constraint (lhs_sval, op, rhs_sval, ctxt);
5011}
5012
841008d3
DM
5013static bool
5014unusable_in_infinite_loop_constraint_p (const svalue *sval)
5015{
5016 if (sval->get_kind () == SK_WIDENING)
5017 return true;
5018 return false;
5019}
5020
48e8a7a6
DM
5021/* Attempt to add the constraint "LHS OP RHS" to this region_model.
5022 If it is consistent with existing constraints, add it, and return true.
5023 Return false if it contradicts existing constraints.
5024 Use CTXT for reporting any diagnostics associated with the accesses. */
5025
5026bool
5027region_model::add_constraint (const svalue *lhs,
5028 enum tree_code op,
5029 const svalue *rhs,
5030 region_model_context *ctxt)
5031{
841008d3
DM
5032 const bool checking_for_infinite_loop
5033 = ctxt ? ctxt->checking_for_infinite_loop_p () : false;
5034
5035 if (checking_for_infinite_loop)
5036 {
5037 if (unusable_in_infinite_loop_constraint_p (lhs)
5038 || unusable_in_infinite_loop_constraint_p (rhs))
5039 {
5040 gcc_assert (ctxt);
5041 ctxt->on_unusable_in_infinite_loop ();
5042 return false;
5043 }
5044 }
5045
48e8a7a6 5046 tristate t_cond = eval_condition (lhs, op, rhs);
757bf1df
DM
5047
5048 /* If we already have the condition, do nothing. */
5049 if (t_cond.is_true ())
5050 return true;
5051
5052 /* Reject a constraint that would contradict existing knowledge, as
5053 unsatisfiable. */
5054 if (t_cond.is_false ())
5055 return false;
5056
841008d3
DM
5057 if (checking_for_infinite_loop)
5058 {
5059 /* Here, we don't have a definite true/false value, so bail out
5060 when checking for infinite loops. */
5061 gcc_assert (ctxt);
5062 ctxt->on_unusable_in_infinite_loop ();
5063 return false;
5064 }
5065
48e8a7a6
DM
5066 bool out;
5067 if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
5068 return out;
757bf1df 5069
c4b8f373
DM
5070 /* Attempt to store the constraint. */
5071 if (!m_constraints->add_constraint (lhs, op, rhs))
5072 return false;
757bf1df
DM
5073
5074 /* Notify the context, if any. This exists so that the state machines
5075 in a program_state can be notified about the condition, and so can
5076 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
5077 when synthesizing constraints as above. */
5078 if (ctxt)
5079 ctxt->on_condition (lhs, op, rhs);
5080
9a2c9579
DM
5081 /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
5082 the case where REGION is heap-allocated and thus could be NULL). */
48e8a7a6
DM
5083 if (tree rhs_cst = rhs->maybe_get_constant ())
5084 if (op == EQ_EXPR && zerop (rhs_cst))
5085 if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
5086 unset_dynamic_extents (region_sval->get_pointee ());
9a2c9579 5087
757bf1df
DM
5088 return true;
5089}
5090
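/* Usage sketch (hypothetical caller; "model", "x" and "ctxt" are
   assumptions for illustration, not names from this file):

     tree ten = build_int_cst (integer_type_node, 10);
     if (model.add_constraint (x, LT_EXPR, ten, ctxt))
       // "x < 10" is consistent with existing constraints: it has been
       // recorded, and eval_condition (x, LT_EXPR, ten, ctxt) would now
       // return true.
       ...
     else
       // "x < 10" contradicts existing knowledge (e.g. if "x >= 10" had
       // already been added), so this path is infeasible.
       ...
*/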
84fb3546
DM
5091/* As above, but when returning false, if OUT is non-NULL, write a
5092 new rejected_constraint to *OUT. */
5093
5094bool
5095region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
5096 region_model_context *ctxt,
8878f7ab 5097 std::unique_ptr<rejected_constraint> *out)
84fb3546
DM
5098{
5099 bool sat = add_constraint (lhs, op, rhs, ctxt);
5100 if (!sat && out)
8878f7ab 5101 *out = make_unique <rejected_op_constraint> (*this, lhs, op, rhs);
84fb3546
DM
5102 return sat;
5103}
5104
757bf1df
DM
5105/* Determine what is known about the condition "LHS OP RHS" within
5106 this model.
5107 Use CTXT for reporting any diagnostics associated with the accesses. */
5108
5109tristate
5110region_model::eval_condition (tree lhs,
5111 enum tree_code op,
5112 tree rhs,
5c6546ca 5113 region_model_context *ctxt) const
757bf1df 5114{
e978955d
DM
5115 /* For now, make no attempt to model constraints on floating-point
5116 values. */
5117 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
5118 return tristate::unknown ();
5119
757bf1df
DM
5120 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
5121}
5122
467a4820
DM
5123/* Implementation of region_model::get_representative_path_var.
5124 Attempt to return a path_var that represents SVAL, or return NULL_TREE.
808f4dfe
DM
5125 Use VISITED to prevent infinite mutual recursion with the overload for
5126 regions. */
757bf1df 5127
808f4dfe 5128path_var
467a4820
DM
5129region_model::get_representative_path_var_1 (const svalue *sval,
5130 svalue_set *visited) const
757bf1df 5131{
467a4820 5132 gcc_assert (sval);
757bf1df 5133
808f4dfe
DM
5134 /* Prevent infinite recursion. */
5135 if (visited->contains (sval))
0e466e97
DM
5136 {
5137 if (sval->get_kind () == SK_CONSTANT)
5138 return path_var (sval->maybe_get_constant (), 0);
5139 else
5140 return path_var (NULL_TREE, 0);
5141 }
808f4dfe 5142 visited->add (sval);
757bf1df 5143
467a4820
DM
5144 /* Handle casts by recursion into get_representative_path_var. */
5145 if (const svalue *cast_sval = sval->maybe_undo_cast ())
5146 {
5147 path_var result = get_representative_path_var (cast_sval, visited);
5148 tree orig_type = sval->get_type ();
5149 /* If necessary, wrap the result in a cast. */
5150 if (result.m_tree && orig_type)
5151 result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
5152 return result;
5153 }
5154
808f4dfe
DM
5155 auto_vec<path_var> pvs;
5156 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 5157
808f4dfe
DM
5158 if (tree cst = sval->maybe_get_constant ())
5159 pvs.safe_push (path_var (cst, 0));
757bf1df 5160
90f7c300 5161 /* Handle string literals and various other pointers. */
808f4dfe
DM
5162 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
5163 {
5164 const region *reg = ptr_sval->get_pointee ();
5165 if (path_var pv = get_representative_path_var (reg, visited))
5166 return path_var (build1 (ADDR_EXPR,
467a4820 5167 sval->get_type (),
808f4dfe
DM
5168 pv.m_tree),
5169 pv.m_stack_depth);
5170 }
5171
5172 /* If we have a sub_svalue, look for ways to represent the parent. */
5173 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 5174 {
808f4dfe
DM
5175 const svalue *parent_sval = sub_sval->get_parent ();
5176 const region *subreg = sub_sval->get_subregion ();
5177 if (path_var parent_pv
5178 = get_representative_path_var (parent_sval, visited))
5179 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
5180 return path_var (build3 (COMPONENT_REF,
5181 sval->get_type (),
5182 parent_pv.m_tree,
5183 field_reg->get_field (),
5184 NULL_TREE),
5185 parent_pv.m_stack_depth);
90f7c300
DM
5186 }
5187
b9365b93
DM
5188 /* Handle binops. */
5189 if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
5190 if (path_var lhs_pv
5191 = get_representative_path_var (binop_sval->get_arg0 (), visited))
5192 if (path_var rhs_pv
5193 = get_representative_path_var (binop_sval->get_arg1 (), visited))
5194 return path_var (build2 (binop_sval->get_op (),
5195 sval->get_type (),
5196 lhs_pv.m_tree, rhs_pv.m_tree),
5197 lhs_pv.m_stack_depth);
5198
808f4dfe
DM
5199 if (pvs.length () < 1)
5200 return path_var (NULL_TREE, 0);
5201
5202 pvs.qsort (readability_comparator);
5203 return pvs[0];
757bf1df
DM
5204}
5205
467a4820
DM
5206/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
5207 Use VISITED to prevent infinite mutual recursion with the overload for
5208 regions.
5209
5210 This function defers to get_representative_path_var_1 to do the work;
5211 it adds verification that get_representative_path_var_1 returned a tree
5212 of the correct type. */
5213
5214path_var
5215region_model::get_representative_path_var (const svalue *sval,
5216 svalue_set *visited) const
5217{
5218 if (sval == NULL)
5219 return path_var (NULL_TREE, 0);
5220
5221 tree orig_type = sval->get_type ();
5222
5223 path_var result = get_representative_path_var_1 (sval, visited);
5224
5225 /* Verify that the result has the same type as SVAL, if any. */
5226 if (result.m_tree && orig_type)
5227 gcc_assert (TREE_TYPE (result.m_tree) == orig_type);
5228
5229 return result;
5230}
5231
5232/* Attempt to return a tree that represents SVAL, or return NULL_TREE.
5233
5234 Strip off any top-level cast, to avoid messages like
5235 double-free of '(void *)ptr'
5236 from analyzer diagnostics. */
757bf1df 5237
808f4dfe
DM
5238tree
5239region_model::get_representative_tree (const svalue *sval) const
757bf1df 5240{
808f4dfe 5241 svalue_set visited;
467a4820
DM
5242 tree expr = get_representative_path_var (sval, &visited).m_tree;
5243
5244 /* Strip off any top-level cast. */
7e3b45be
TL
5245 if (expr && TREE_CODE (expr) == NOP_EXPR)
5246 expr = TREE_OPERAND (expr, 0);
5247
5248 return fixup_tree_for_diagnostic (expr);
5249}
5250
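/* As above, but attempt to return a tree that represents REG, or return
   NULL_TREE.  */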
5251tree
5252region_model::get_representative_tree (const region *reg) const
5253{
5254 svalue_set visited;
5255 tree expr = get_representative_path_var (reg, &visited).m_tree;
5256
5257 /* Strip off any top-level cast. */
467a4820 5258 if (expr && TREE_CODE (expr) == NOP_EXPR)
e4bb1bd6 5259 expr = TREE_OPERAND (expr, 0);
467a4820 5260
e4bb1bd6 5261 return fixup_tree_for_diagnostic (expr);
808f4dfe
DM
5262}
5263
467a4820
DM
5264/* Implementation of region_model::get_representative_path_var.
5265
5266 Attempt to return a path_var that represents REG, or return
808f4dfe
DM
5267 the NULL path_var.
5268 For example, a region for a field of a local would be a path_var
5269 wrapping a COMPONENT_REF.
5270 Use VISITED to prevent infinite mutual recursion with the overload for
5271 svalues. */
757bf1df 5272
808f4dfe 5273path_var
467a4820
DM
5274region_model::get_representative_path_var_1 (const region *reg,
5275 svalue_set *visited) const
808f4dfe
DM
5276{
5277 switch (reg->get_kind ())
757bf1df 5278 {
808f4dfe
DM
5279 default:
5280 gcc_unreachable ();
e516294a 5281
808f4dfe
DM
5282 case RK_FRAME:
5283 case RK_GLOBALS:
5284 case RK_CODE:
5285 case RK_HEAP:
5286 case RK_STACK:
358dab90 5287 case RK_THREAD_LOCAL:
808f4dfe
DM
5288 case RK_ROOT:
5289 /* Regions that represent memory spaces are not expressible as trees. */
5290 return path_var (NULL_TREE, 0);
757bf1df 5291
808f4dfe 5292 case RK_FUNCTION:
884d9141 5293 {
808f4dfe
DM
5294 const function_region *function_reg
5295 = as_a <const function_region *> (reg);
5296 return path_var (function_reg->get_fndecl (), 0);
884d9141 5297 }
808f4dfe 5298 case RK_LABEL:
9e78634c
DM
5299 {
5300 const label_region *label_reg = as_a <const label_region *> (reg);
5301 return path_var (label_reg->get_label (), 0);
5302 }
90f7c300 5303
808f4dfe
DM
5304 case RK_SYMBOLIC:
5305 {
5306 const symbolic_region *symbolic_reg
5307 = as_a <const symbolic_region *> (reg);
5308 const svalue *pointer = symbolic_reg->get_pointer ();
5309 path_var pointer_pv = get_representative_path_var (pointer, visited);
5310 if (!pointer_pv)
5311 return path_var (NULL_TREE, 0);
5312 tree offset = build_int_cst (pointer->get_type (), 0);
5313 return path_var (build2 (MEM_REF,
5314 reg->get_type (),
5315 pointer_pv.m_tree,
5316 offset),
5317 pointer_pv.m_stack_depth);
5318 }
5319 case RK_DECL:
5320 {
5321 const decl_region *decl_reg = as_a <const decl_region *> (reg);
5322 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
5323 }
5324 case RK_FIELD:
5325 {
5326 const field_region *field_reg = as_a <const field_region *> (reg);
5327 path_var parent_pv
5328 = get_representative_path_var (reg->get_parent_region (), visited);
5329 if (!parent_pv)
5330 return path_var (NULL_TREE, 0);
5331 return path_var (build3 (COMPONENT_REF,
5332 reg->get_type (),
5333 parent_pv.m_tree,
5334 field_reg->get_field (),
5335 NULL_TREE),
5336 parent_pv.m_stack_depth);
5337 }
757bf1df 5338
808f4dfe
DM
5339 case RK_ELEMENT:
5340 {
5341 const element_region *element_reg
5342 = as_a <const element_region *> (reg);
5343 path_var parent_pv
5344 = get_representative_path_var (reg->get_parent_region (), visited);
5345 if (!parent_pv)
5346 return path_var (NULL_TREE, 0);
5347 path_var index_pv
5348 = get_representative_path_var (element_reg->get_index (), visited);
5349 if (!index_pv)
5350 return path_var (NULL_TREE, 0);
5351 return path_var (build4 (ARRAY_REF,
5352 reg->get_type (),
5353 parent_pv.m_tree, index_pv.m_tree,
5354 NULL_TREE, NULL_TREE),
5355 parent_pv.m_stack_depth);
5356 }
757bf1df 5357
808f4dfe 5358 case RK_OFFSET:
757bf1df 5359 {
808f4dfe
DM
5360 const offset_region *offset_reg
5361 = as_a <const offset_region *> (reg);
5362 path_var parent_pv
5363 = get_representative_path_var (reg->get_parent_region (), visited);
5364 if (!parent_pv)
5365 return path_var (NULL_TREE, 0);
5366 path_var offset_pv
5367 = get_representative_path_var (offset_reg->get_byte_offset (),
5368 visited);
29f5db8e 5369 if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
808f4dfe 5370 return path_var (NULL_TREE, 0);
29f5db8e
DM
5371 tree addr_parent = build1 (ADDR_EXPR,
5372 build_pointer_type (reg->get_type ()),
5373 parent_pv.m_tree);
c87f1f3d
JJ
5374 tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode,
5375 true);
5376 return path_var (build2 (MEM_REF, reg->get_type (), addr_parent,
5377 fold_convert (ptype, offset_pv.m_tree)),
808f4dfe 5378 parent_pv.m_stack_depth);
757bf1df 5379 }
757bf1df 5380
e61ffa20
DM
5381 case RK_SIZED:
5382 return path_var (NULL_TREE, 0);
5383
808f4dfe
DM
5384 case RK_CAST:
5385 {
5386 path_var parent_pv
5387 = get_representative_path_var (reg->get_parent_region (), visited);
5388 if (!parent_pv)
5389 return path_var (NULL_TREE, 0);
5390 return path_var (build1 (NOP_EXPR,
5391 reg->get_type (),
5392 parent_pv.m_tree),
5393 parent_pv.m_stack_depth);
5394 }
757bf1df 5395
808f4dfe
DM
5396 case RK_HEAP_ALLOCATED:
5397 case RK_ALLOCA:
5398 /* No good way to express heap-allocated/alloca regions as trees. */
5399 return path_var (NULL_TREE, 0);
757bf1df 5400
808f4dfe
DM
5401 case RK_STRING:
5402 {
5403 const string_region *string_reg = as_a <const string_region *> (reg);
5404 return path_var (string_reg->get_string_cst (), 0);
5405 }
757bf1df 5406
2402dc6b 5407 case RK_VAR_ARG:
358dab90 5408 case RK_ERRNO:
808f4dfe 5409 case RK_UNKNOWN:
f65f63c4 5410 case RK_PRIVATE:
808f4dfe
DM
5411 return path_var (NULL_TREE, 0);
5412 }
757bf1df
DM
5413}
5414
467a4820
DM
5415/* Attempt to return a path_var that represents REG, or return
5416 the NULL path_var.
5417 For example, a region for a field of a local would be a path_var
5418 wrapping a COMPONENT_REF.
5419 Use VISITED to prevent infinite mutual recursion with the overload for
5420 svalues.
5421
5422 This function defers to get_representative_path_var_1 to do the work;
5423 it adds verification that get_representative_path_var_1 returned a tree
5424 of the correct type. */
5425
5426path_var
5427region_model::get_representative_path_var (const region *reg,
5428 svalue_set *visited) const
5429{
5430 path_var result = get_representative_path_var_1 (reg, visited);
5431
5432 /* Verify that the result has the same type as REG, if any. */
5433 if (result.m_tree && reg->get_type ())
5434 gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());
5435
5436 return result;
5437}
5438
757bf1df
DM
5439/* Update this model for any phis in SNODE, assuming we came from
5440 LAST_CFG_SUPEREDGE. */
5441
5442void
5443region_model::update_for_phis (const supernode *snode,
5444 const cfg_superedge *last_cfg_superedge,
5445 region_model_context *ctxt)
5446{
5447 gcc_assert (last_cfg_superedge);
5448
e0a7a675
DM
5449 /* Copy this state and pass it to handle_phi so that all of the phi stmts
5450 are effectively handled simultaneously. */
5451 const region_model old_state (*this);
5452
841008d3
DM
5453 hash_set<const svalue *> svals_changing_meaning;
5454
757bf1df
DM
5455 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
5456 !gsi_end_p (gpi); gsi_next (&gpi))
5457 {
5458 gphi *phi = gpi.phi ();
5459
5460 tree src = last_cfg_superedge->get_phi_arg (phi);
5461 tree lhs = gimple_phi_result (phi);
5462
e0a7a675 5463 /* Update next_state based on phi and old_state. */
841008d3 5464 handle_phi (phi, lhs, src, old_state, svals_changing_meaning, ctxt);
757bf1df 5465 }
841008d3
DM
5466
5467 for (auto iter : svals_changing_meaning)
5468 m_constraints->purge_state_involving (iter);
757bf1df
DM
5469}
5470
5471/* Attempt to update this model for taking EDGE (where the last statement
5472 was LAST_STMT), returning true if the edge can be taken, false
5473 otherwise.
84fb3546
DM
5474 When returning false, if OUT is non-NULL, write a new rejected_constraint
5475 to it.
757bf1df
DM
5476
5477 For CFG superedges where LAST_STMT is a conditional or a switch
5478 statement, attempt to add the relevant conditions for EDGE to this
5479 model, returning true if they are feasible, or false if they are
5480 impossible.
5481
5482 For call superedges, push frame information and store arguments
5483 into parameters.
5484
5485 For return superedges, pop frame information and store return
5486 values into any lhs.
5487
5488 Rejection of call/return superedges happens elsewhere, in
5489 program_point::on_edge (i.e. based on program point, rather
5490 than program state). */
5491
5492bool
5493region_model::maybe_update_for_edge (const superedge &edge,
5494 const gimple *last_stmt,
84fb3546 5495 region_model_context *ctxt,
8878f7ab 5496 std::unique_ptr<rejected_constraint> *out)
757bf1df
DM
5497{
5498 /* Handle frame updates for interprocedural edges. */
5499 switch (edge.m_kind)
5500 {
5501 default:
5502 break;
5503
5504 case SUPEREDGE_CALL:
5505 {
5506 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
5507 update_for_call_superedge (*call_edge, ctxt);
5508 }
5509 break;
5510
5511 case SUPEREDGE_RETURN:
5512 {
5513 const return_superedge *return_edge
5514 = as_a <const return_superedge *> (&edge);
5515 update_for_return_superedge (*return_edge, ctxt);
5516 }
5517 break;
5518
5519 case SUPEREDGE_INTRAPROCEDURAL_CALL:
bfca9505
DM
5520 /* This is a no-op for call summaries; we should already
5521 have handled the effect of the call summary at the call stmt. */
757bf1df
DM
5522 break;
5523 }
5524
5525 if (last_stmt == NULL)
5526 return true;
5527
1b761fed 5528 /* Apply any constraints for conditionals/switch/computed-goto statements. */
757bf1df
DM
5529
5530 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
5531 {
5532 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
84fb3546 5533 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
757bf1df
DM
5534 }
5535
5536 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
5537 {
5538 const switch_cfg_superedge *switch_sedge
5539 = as_a <const switch_cfg_superedge *> (&edge);
84fb3546
DM
5540 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
5541 ctxt, out);
757bf1df
DM
5542 }
5543
1b761fed
DM
5544 if (const ggoto *goto_stmt = dyn_cast <const ggoto *> (last_stmt))
5545 {
5546 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
5547 return apply_constraints_for_ggoto (*cfg_sedge, goto_stmt, ctxt);
5548 }
5549
1690a839
DM
5550 /* Apply any constraints due to an exception being thrown. */
5551 if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
5552 if (cfg_sedge->get_flags () & EDGE_EH)
84fb3546 5553 return apply_constraints_for_exception (last_stmt, ctxt, out);
1690a839 5554
757bf1df
DM
5555 return true;
5556}
5557
5558 /* Push a new frame_region onto the stack region.
5559 Populate the frame_region with child regions for the function call's
5560 parameters, using values from the arguments at the callsite in the
5561 caller's frame. */
5562
5563void
aef703cf 5564region_model::update_for_gcall (const gcall *call_stmt,
e92d0ff6
AS
5565 region_model_context *ctxt,
5566 function *callee)
757bf1df 5567{
808f4dfe 5568 /* Build a vec of argument svalues, using the current top
757bf1df 5569 frame for resolving tree expressions. */
808f4dfe 5570 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
5571
5572 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
5573 {
5574 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 5575 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
5576 }
5577
e92d0ff6
AS
5578 if (!callee)
5579 {
5580 /* Get the function * from the gcall. */
5581 tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
5582 callee = DECL_STRUCT_FUNCTION (fn_decl);
5583 }
5584
c0d8a64e
DM
5585 gcc_assert (callee);
5586 push_frame (*callee, &arg_svals, ctxt);
757bf1df
DM
5587}
5588
a96f1c38
DM
5589/* Pop the top-most frame_region from the stack, and copy the return
5590 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 5591 the call (if any). */
aef703cf 5592
757bf1df 5593void
aef703cf
AS
5594region_model::update_for_return_gcall (const gcall *call_stmt,
5595 region_model_context *ctxt)
757bf1df 5596{
4cebae09
DM
5597 /* Get the lvalue for the result of the call, passing it to pop_frame,
5598 so that pop_frame can determine the region with respect to the
5599 *caller* frame. */
757bf1df 5600 tree lhs = gimple_call_lhs (call_stmt);
4cebae09 5601 pop_frame (lhs, NULL, ctxt);
757bf1df
DM
5602}
5603
aef703cf
AS
5604/* Extract calling information from the superedge and update the model for the
5605 call. */
5606
5607void
5608region_model::update_for_call_superedge (const call_superedge &call_edge,
5609 region_model_context *ctxt)
5610{
5611 const gcall *call_stmt = call_edge.get_call_stmt ();
e92d0ff6 5612 update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
aef703cf
AS
5613}
5614
5615/* Extract calling information from the return superedge and update the model
5616 for the returning call. */
5617
5618void
5619region_model::update_for_return_superedge (const return_superedge &return_edge,
5620 region_model_context *ctxt)
5621{
5622 const gcall *call_stmt = return_edge.get_call_stmt ();
5623 update_for_return_gcall (call_stmt, ctxt);
5624}
5625
64aa48ce 5626/* Attempt to use R to replay SUMMARY into this object.
bfca9505 5627 Return true if it is possible. */
757bf1df 5628
bfca9505
DM
5629bool
5630region_model::replay_call_summary (call_summary_replay &r,
5631 const region_model &summary)
757bf1df 5632{
bfca9505
DM
5633 gcc_assert (summary.get_stack_depth () == 1);
5634
5635 m_store.replay_call_summary (r, summary.m_store);
757bf1df 5636
841008d3
DM
5637 if (r.get_ctxt ())
5638 r.get_ctxt ()->maybe_did_work ();
5639
bfca9505
DM
5640 if (!m_constraints->replay_call_summary (r, *summary.m_constraints))
5641 return false;
5642
5643 for (auto kv : summary.m_dynamic_extents)
5644 {
5645 const region *summary_reg = kv.first;
5646 const region *caller_reg = r.convert_region_from_summary (summary_reg);
5647 if (!caller_reg)
5648 continue;
5649 const svalue *summary_sval = kv.second;
5650 const svalue *caller_sval = r.convert_svalue_from_summary (summary_sval);
5651 if (!caller_sval)
5652 continue;
5653 m_dynamic_extents.put (caller_reg, caller_sval);
5654 }
5655
5656 return true;
757bf1df
DM
5657}
5658
5659/* Given a true or false edge guarded by conditional statement COND_STMT,
5660 determine appropriate constraints for the edge to be taken.
5661
5662 If they are feasible, add the constraints and return true.
5663
5664 Return false if the constraints contradict existing knowledge
84fb3546
DM
5665 (and so the edge should not be taken).
5666 When returning false, if OUT is non-NULL, write a new rejected_constraint
5667 to it. */
757bf1df
DM
5668
5669bool
8878f7ab
DM
5670region_model::
5671apply_constraints_for_gcond (const cfg_superedge &sedge,
5672 const gcond *cond_stmt,
5673 region_model_context *ctxt,
5674 std::unique_ptr<rejected_constraint> *out)
757bf1df
DM
5675{
5676 ::edge cfg_edge = sedge.get_cfg_edge ();
5677 gcc_assert (cfg_edge != NULL);
5678 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5679
5680 enum tree_code op = gimple_cond_code (cond_stmt);
5681 tree lhs = gimple_cond_lhs (cond_stmt);
5682 tree rhs = gimple_cond_rhs (cond_stmt);
5683 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5684 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 5685 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
5686}
5687
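/* Example (illustrative, not from the original sources): for

     if (x < 10)

   the true edge adds the constraint "x < 10" directly, whereas the false
   edge first inverts the comparison via invert_tree_comparison, adding
   "x >= 10" instead.  */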
ccd4df81
DM
5688/* Return true iff SWITCH_STMT has a non-default label that contains
5689 INT_CST. */
5690
5691static bool
5692has_nondefault_case_for_value_p (const gswitch *switch_stmt, tree int_cst)
5693{
5694 /* We expect the initial label to be the default; skip it. */
5695 gcc_assert (CASE_LOW (gimple_switch_label (switch_stmt, 0)) == NULL);
5696 unsigned min_idx = 1;
5697 unsigned max_idx = gimple_switch_num_labels (switch_stmt) - 1;
5698
5699 /* Binary search: try to find the label containing INT_CST.
5700 This requires the cases to be sorted by CASE_LOW (done by the
5701 gimplifier). */
5702 while (max_idx >= min_idx)
5703 {
5704 unsigned case_idx = (min_idx + max_idx) / 2;
5705 tree label = gimple_switch_label (switch_stmt, case_idx);
5706 tree low = CASE_LOW (label);
5707 gcc_assert (low);
5708 tree high = CASE_HIGH (label);
5709 if (!high)
5710 high = low;
5711 if (tree_int_cst_compare (int_cst, low) < 0)
5712 {
5713 /* INT_CST is below the range of this label. */
5714 gcc_assert (case_idx > 0);
5715 max_idx = case_idx - 1;
5716 }
5717 else if (tree_int_cst_compare (int_cst, high) > 0)
5718 {
5719 /* INT_CST is above the range of this case. */
5720 min_idx = case_idx + 1;
5721 }
5722 else
5723 /* This case contains INT_CST. */
5724 return true;
5725 }
5726 /* Not found. */
5727 return false;
5728}
5729
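/* Worked example (illustrative): for

     switch (x) { case 1: ...; case 3 ... 5: ...; default: ...; }

   the non-default labels, sorted by CASE_LOW, are "1" (index 1) and
   "3 ... 5" (index 2).  Searching for INT_CST == 4: with min_idx == 1 and
   max_idx == 2 we probe index 1, find 4 > 1, and set min_idx = 2; the
   next probe at index 2 finds 3 <= 4 <= 5, so that label contains
   INT_CST and we return true.  */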
5730/* Return true iff SWITCH_STMT (which must be on an enum value)
5731 has nondefault cases handling all values in the enum. */
5732
5733static bool
3cbab07b
AO
5734has_nondefault_cases_for_all_enum_values_p (const gswitch *switch_stmt,
5735 tree type)
ccd4df81
DM
5736{
5737 gcc_assert (switch_stmt);
ccd4df81
DM
5738 gcc_assert (TREE_CODE (type) == ENUMERAL_TYPE);
5739
5740 for (tree enum_val_iter = TYPE_VALUES (type);
5741 enum_val_iter;
5742 enum_val_iter = TREE_CHAIN (enum_val_iter))
5743 {
5744 tree enum_val = TREE_VALUE (enum_val_iter);
5745 gcc_assert (TREE_CODE (enum_val) == CONST_DECL);
5746 gcc_assert (TREE_CODE (DECL_INITIAL (enum_val)) == INTEGER_CST);
5747 if (!has_nondefault_case_for_value_p (switch_stmt,
5748 DECL_INITIAL (enum_val)))
5749 return false;
5750 }
5751 return true;
5752}
5753
757bf1df
DM
5754/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
5755 for the edge to be taken.
5756
5757 If they are feasible, add the constraints and return true.
5758
5759 Return false if the constraints contradict existing knowledge
84fb3546
DM
5760 (and so the edge should not be taken).
5761 When returning false, if OUT is non-NULL, write a new rejected_constraint
5762 to it. */
757bf1df
DM
5763
5764bool
8878f7ab
DM
5765region_model::
5766apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
5767 const gswitch *switch_stmt,
5768 region_model_context *ctxt,
5769 std::unique_ptr<rejected_constraint> *out)
757bf1df 5770{
ccd4df81
DM
5771 tree index = gimple_switch_index (switch_stmt);
5772 const svalue *index_sval = get_rvalue (index, ctxt);
3cbab07b
AO
5773 bool check_index_type = true;
5774
5775 /* With -fshort-enums, there may be a type cast. */
5776 if (ctxt && index_sval->get_kind () == SK_UNARYOP
5777 && TREE_CODE (index_sval->get_type ()) == INTEGER_TYPE)
5778 {
5779 const unaryop_svalue *unaryop = as_a <const unaryop_svalue *> (index_sval);
5780 if (unaryop->get_op () == NOP_EXPR
5781 && is_a <const initial_svalue *> (unaryop->get_arg ()))
5782 if (const initial_svalue *initvalop = (as_a <const initial_svalue *>
5783 (unaryop->get_arg ())))
e945d322
DM
5784 if (initvalop->get_type ()
5785 && TREE_CODE (initvalop->get_type ()) == ENUMERAL_TYPE)
3cbab07b
AO
5786 {
5787 index_sval = initvalop;
5788 check_index_type = false;
5789 }
5790 }
ccd4df81
DM
5791
5792 /* If we're switching based on an enum type, assume that the user is only
5793 working with values from the enum. Hence if this is an
5794 implicitly-created "default", assume it doesn't get followed.
5795 This fixes numerous "uninitialized" false positives where we otherwise
5796 consider jumping past the initialization cases. */
5797
5798 if (/* Don't check during feasibility-checking (when ctxt is NULL). */
5799 ctxt
5800 /* Must be an enum value. */
5801 && index_sval->get_type ()
3cbab07b
AO
5802 && (!check_index_type
5803 || TREE_CODE (TREE_TYPE (index)) == ENUMERAL_TYPE)
ccd4df81
DM
5804 && TREE_CODE (index_sval->get_type ()) == ENUMERAL_TYPE
5805 /* If we have a constant, then we can check it directly. */
5806 && index_sval->get_kind () != SK_CONSTANT
5807 && edge.implicitly_created_default_p ()
3cbab07b
AO
5808 && has_nondefault_cases_for_all_enum_values_p (switch_stmt,
5809 index_sval->get_type ())
ccd4df81
DM
5810 /* Don't do this if there's a chance that the index is
5811 attacker-controlled. */
5812 && !ctxt->possibly_tainted_p (index_sval))
5813 {
5814 if (out)
8878f7ab 5815 *out = make_unique <rejected_default_case> (*this);
ccd4df81
DM
5816 return false;
5817 }
5818
8ca7fa84
DM
5819 bounded_ranges_manager *ranges_mgr = get_range_manager ();
5820 const bounded_ranges *all_cases_ranges
5821 = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
8ca7fa84
DM
5822 bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
5823 if (!sat && out)
8878f7ab 5824 *out = make_unique <rejected_ranges_constraint> (*this, index, all_cases_ranges);
2c044ff1
DM
5825 if (sat && ctxt && !all_cases_ranges->empty_p ())
5826 ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
8ca7fa84 5827 return sat;
757bf1df
DM
5828}
5829
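/* Example of the enum heuristic above (illustrative): given

     enum color { RED, GREEN, BLUE };
     ...
     switch (c)
       {
       case RED:   ...; break;
       case GREEN: ...; break;
       case BLUE:  ...; break;
       }

   the implicitly-created "default:" edge is rejected as infeasible
   (assuming "c" isn't tainted), since every value of the enum has a
   non-default case; this avoids false "uninitialized" paths that would
   jump past the cases that perform initialization.  */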
1b761fed
DM
5830/* Given an edge reached by GOTO_STMT, determine appropriate constraints
5831 for the edge to be taken.
5832
5833 If they are feasible, add the constraints and return true.
5834
5835 Return false if the constraints contradict existing knowledge
5836 (and so the edge should not be taken). */
5837
5838bool
5839region_model::apply_constraints_for_ggoto (const cfg_superedge &edge,
5840 const ggoto *goto_stmt,
5841 region_model_context *ctxt)
5842{
5843 tree dest = gimple_goto_dest (goto_stmt);
5844 const svalue *dest_sval = get_rvalue (dest, ctxt);
5845
5846 /* If we know we were jumping to a specific label. */
5847 if (tree dst_label = edge.m_dest->get_label ())
5848 {
5849 const label_region *dst_label_reg
5850 = m_mgr->get_region_for_label (dst_label);
5851 const svalue *dst_label_ptr
5852 = m_mgr->get_ptr_svalue (ptr_type_node, dst_label_reg);
5853
5854 if (!add_constraint (dest_sval, EQ_EXPR, dst_label_ptr, ctxt))
5855 return false;
5856 }
5857
5858 return true;
5859}
5860
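/* Example (illustrative): for a computed goto such as

     static void *targets[] = { &&l1, &&l2 };
     goto *targets[i];

   the CFG edge to "l1" adds the constraint that the destination pointer
   equals &&l1; if the destination is already known to equal &&l2, the
   edge is rejected as infeasible.  */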
1690a839
DM
5861/* Apply any constraints due to an exception being thrown at LAST_STMT.
5862
5863 If they are feasible, add the constraints and return true.
5864
5865 Return false if the constraints contradict existing knowledge
84fb3546
DM
5866 (and so the edge should not be taken).
5867 When returning false, if OUT is non-NULL, write a new rejected_constraint
5868 to it. */
1690a839
DM
5869
5870bool
8878f7ab
DM
5871region_model::
5872apply_constraints_for_exception (const gimple *last_stmt,
5873 region_model_context *ctxt,
5874 std::unique_ptr<rejected_constraint> *out)
1690a839
DM
5875{
5876 gcc_assert (last_stmt);
5877 if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
5878 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
5879 if (is_named_call_p (callee_fndecl, "operator new", call, 1)
5880 || is_named_call_p (callee_fndecl, "operator new []", call, 1))
5881 {
5882 /* We have an exception thrown from operator new.
5883 Add a constraint that the result was NULL, to avoid a false
5884 leak report due to the result being lost when following
5885 the EH edge. */
5886 if (tree lhs = gimple_call_lhs (call))
84fb3546 5887 return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
1690a839
DM
5888 return true;
5889 }
5890 return true;
5891}
5892
808f4dfe
DM
5893/* For use with push_frame when handling a top-level call within the analysis.
5894 PARAM has a defined but unknown initial value.
5895 Anything it points to has escaped, since the calling context "knows"
5896 the pointer, and thus calls to unknown functions could read/write into
dcfc7ac9
DM
5897 the region.
5898 If NONNULL is true, then assume that PARAM must be non-NULL. */
757bf1df
DM
5899
5900void
808f4dfe 5901region_model::on_top_level_param (tree param,
dcfc7ac9
DM
5902 bool nonnull,
5903 region_model_context *ctxt)
757bf1df 5904{
808f4dfe 5905 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 5906 {
808f4dfe
DM
5907 const region *param_reg = get_lvalue (param, ctxt);
5908 const svalue *init_ptr_sval
5909 = m_mgr->get_or_create_initial_value (param_reg);
5910 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
5911 m_store.mark_as_escaped (pointee_reg);
dcfc7ac9
DM
5912 if (nonnull)
5913 {
5914 const svalue *null_ptr_sval
5915 = m_mgr->get_or_create_null_ptr (TREE_TYPE (param));
5916 add_constraint (init_ptr_sval, NE_EXPR, null_ptr_sval, ctxt);
5917 }
5eae0ac7 5918 }
757bf1df
DM
5919}
5920
808f4dfe
DM
5921/* Update this region_model to reflect pushing a frame onto the stack
5922 for a call to FUN.
757bf1df 5923
808f4dfe
DM
5924 If ARG_SVALS is non-NULL, use it to populate the parameters
5925 in the new frame.
5926 Otherwise, the params have their initial_svalues.
757bf1df 5927
808f4dfe 5928 Return the frame_region for the new frame. */
757bf1df 5929
808f4dfe 5930const region *
c0d8a64e
DM
5931region_model::push_frame (const function &fun,
5932 const vec<const svalue *> *arg_svals,
808f4dfe 5933 region_model_context *ctxt)
757bf1df 5934{
808f4dfe
DM
5935 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
5936 if (arg_svals)
757bf1df 5937 {
808f4dfe 5938 /* Arguments supplied from a caller frame. */
c0d8a64e 5939 tree fndecl = fun.decl;
808f4dfe
DM
5940 unsigned idx = 0;
5941 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5942 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 5943 {
808f4dfe
DM
5944 /* If there's a mismatching declaration, the call stmt might
5945 not have enough args. Handle this case by leaving the
5946 rest of the params as uninitialized. */
5947 if (idx >= arg_svals->length ())
5948 break;
294b6da2 5949 tree parm_lval = iter_parm;
c0d8a64e 5950 if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
294b6da2
DM
5951 parm_lval = parm_default_ssa;
5952 const region *parm_reg = get_lvalue (parm_lval, ctxt);
808f4dfe 5953 const svalue *arg_sval = (*arg_svals)[idx];
808f4dfe 5954 set_value (parm_reg, arg_sval, ctxt);
757bf1df 5955 }
2402dc6b
DM
5956
5957 /* Handle any variadic args. */
5958 unsigned va_arg_idx = 0;
5959 for (; idx < arg_svals->length (); idx++, va_arg_idx++)
5960 {
5961 const svalue *arg_sval = (*arg_svals)[idx];
5962 const region *var_arg_reg
5963 = m_mgr->get_var_arg_region (m_current_frame,
5964 va_arg_idx);
5965 set_value (var_arg_reg, arg_sval, ctxt);
5966 }
757bf1df 5967 }
808f4dfe 5968 else
757bf1df 5969 {
808f4dfe
DM
5970 /* Otherwise we have a top-level call within the analysis. The params
5971 have defined but unknown initial values.
5972 Anything they point to has escaped. */
c0d8a64e 5973 tree fndecl = fun.decl;
dcfc7ac9
DM
5974
5975 /* Handle "__attribute__((nonnull))". */
5976 tree fntype = TREE_TYPE (fndecl);
5977 bitmap nonnull_args = get_nonnull_args (fntype);
5978
5979 unsigned parm_idx = 0;
808f4dfe
DM
5980 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
5981 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 5982 {
dcfc7ac9
DM
5983 bool non_null = (nonnull_args
5984 ? (bitmap_empty_p (nonnull_args)
5985 || bitmap_bit_p (nonnull_args, parm_idx))
5986 : false);
c0d8a64e 5987 if (tree parm_default_ssa = get_ssa_default_def (fun, iter_parm))
dcfc7ac9 5988 on_top_level_param (parm_default_ssa, non_null, ctxt);
294b6da2 5989 else
dcfc7ac9
DM
5990 on_top_level_param (iter_parm, non_null, ctxt);
5991 parm_idx++;
757bf1df 5992 }
dcfc7ac9
DM
5993
5994 BITMAP_FREE (nonnull_args);
757bf1df 5995 }
757bf1df 5996
808f4dfe 5997 return m_current_frame;
757bf1df
DM
5998}
5999
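/* Usage sketch (hypothetical; "model", "foo_fun" and the trees are
   assumptions for illustration): handling a call "y = foo (a, b)" by
   hand, mirroring update_for_gcall and update_for_return_gcall:

     auto_vec<const svalue *> args;
     args.safe_push (model.get_rvalue (a_tree, ctxt));
     args.safe_push (model.get_rvalue (b_tree, ctxt));
     model.push_frame (*foo_fun, &args, ctxt);  // bind args to params
     // ... analyze foo's body ...
     model.pop_frame (y_tree, NULL, ctxt);      // copy return value to y
*/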
808f4dfe
DM
6000/* Get the function of the top-most frame in this region_model's stack.
6001 There must be such a frame. */
757bf1df 6002
c0d8a64e 6003const function *
808f4dfe 6004region_model::get_current_function () const
757bf1df 6005{
808f4dfe
DM
6006 const frame_region *frame = get_current_frame ();
6007 gcc_assert (frame);
c0d8a64e 6008 return &frame->get_function ();
757bf1df
DM
6009}
6010
808f4dfe 6011/* Pop the topmost frame_region from this region_model's stack;
757bf1df 6012
4cebae09
DM
6013 If RESULT_LVALUE is non-null, copy any return value from the frame
6014 into the corresponding region (evaluated with respect to the *caller*
6015 frame, rather than the called frame).
808f4dfe
DM
6016 If OUT_RESULT is non-null, copy any return value from the frame
6017 into *OUT_RESULT.
757bf1df 6018
430d7d88
DM
6019 If EVAL_RETURN_SVALUE is false, then don't evaluate the return value.
6020 This is for use when unwinding frames e.g. due to longjmp, to suppress
6021 erroneously reporting uninitialized return values.
6022
808f4dfe
DM
6023 Purge the frame region and all its descendent regions.
6024 Convert any pointers that point into such regions into
6025 POISON_KIND_POPPED_STACK svalues. */
757bf1df 6026
808f4dfe 6027void
4cebae09 6028region_model::pop_frame (tree result_lvalue,
808f4dfe 6029 const svalue **out_result,
430d7d88
DM
6030 region_model_context *ctxt,
6031 bool eval_return_svalue)
808f4dfe
DM
6032{
6033 gcc_assert (m_current_frame);
757bf1df 6034
597b9ec6 6035 const region_model pre_popped_model = *this;
808f4dfe 6036 const frame_region *frame_reg = m_current_frame;
5c6546ca
DM
6037
6038 /* Notify state machines. */
6039 if (ctxt)
6040 ctxt->on_pop_frame (frame_reg);
6041
6042 /* Evaluate the result, within the callee frame. */
c0d8a64e 6043 tree fndecl = m_current_frame->get_function ().decl;
808f4dfe 6044 tree result = DECL_RESULT (fndecl);
4cebae09 6045 const svalue *retval = NULL;
430d7d88
DM
6046 if (result
6047 && TREE_TYPE (result) != void_type_node
6048 && eval_return_svalue)
808f4dfe 6049 {
4cebae09 6050 retval = get_rvalue (result, ctxt);
808f4dfe 6051 if (out_result)
13ad6d9f 6052 *out_result = retval;
808f4dfe 6053 }
757bf1df 6054
808f4dfe
DM
6055 /* Pop the frame. */
6056 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 6057
4cebae09
DM
6058 if (result_lvalue && retval)
6059 {
430d7d88
DM
6060 gcc_assert (eval_return_svalue);
6061
4cebae09
DM
6062 /* Compute result_dst_reg using RESULT_LVALUE *after* popping
6063 the frame, but before poisoning pointers into the old frame. */
6064 const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
6065 set_value (result_dst_reg, retval, ctxt);
6066 }
6067
808f4dfe 6068 unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
597b9ec6 6069 notify_on_pop_frame (this, &pre_popped_model, retval, ctxt);
757bf1df
DM
6070}
6071
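/* Example (illustrative): for

     int *global_p;
     void callee (void) { int local; global_p = &local; }

   popping callee's frame unbinds "local" and converts the value of
   "global_p" into a poisoned svalue of kind POISON_KIND_POPPED_STACK,
   so that later dereferences can be diagnosed as uses of a pointer into
   a popped stack frame.  */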
808f4dfe 6072/* Get the number of frames in this region_model's stack. */
757bf1df 6073
808f4dfe
DM
6074int
6075region_model::get_stack_depth () const
757bf1df 6076{
808f4dfe
DM
6077 const frame_region *frame = get_current_frame ();
6078 if (frame)
6079 return frame->get_stack_depth ();
6080 else
6081 return 0;
757bf1df
DM
6082}
6083
808f4dfe
DM
6084/* Get the frame_region with the given index within the stack.
6085 The frame_region must exist. */
757bf1df 6086
808f4dfe
DM
6087const frame_region *
6088region_model::get_frame_at_index (int index) const
757bf1df 6089{
808f4dfe
DM
6090 const frame_region *frame = get_current_frame ();
6091 gcc_assert (frame);
6092 gcc_assert (index >= 0);
6093 gcc_assert (index <= frame->get_index ());
6094 while (index != frame->get_index ())
6095 {
6096 frame = frame->get_calling_frame ();
6097 gcc_assert (frame);
6098 }
6099 return frame;
757bf1df
DM
6100}
6101
808f4dfe
DM
6102/* Unbind svalues for any regions in REG and below.
6103 Find any pointers to such regions; convert them to
9a2c9579
DM
6104 poisoned values of kind PKIND.
6105 Also purge any dynamic extents. */
757bf1df 6106
808f4dfe
DM
6107void
6108region_model::unbind_region_and_descendents (const region *reg,
6109 enum poison_kind pkind)
757bf1df 6110{
808f4dfe
DM
6111 /* Gather a set of base regions to be unbound. */
6112 hash_set<const region *> base_regs;
6113 for (store::cluster_map_t::iterator iter = m_store.begin ();
6114 iter != m_store.end (); ++iter)
757bf1df 6115 {
808f4dfe
DM
6116 const region *iter_base_reg = (*iter).first;
6117 if (iter_base_reg->descendent_of_p (reg))
6118 base_regs.add (iter_base_reg);
757bf1df 6119 }
808f4dfe
DM
6120 for (hash_set<const region *>::iterator iter = base_regs.begin ();
6121 iter != base_regs.end (); ++iter)
6122 m_store.purge_cluster (*iter);
757bf1df 6123
808f4dfe
DM
6124 /* Find any pointers to REG or its descendents; convert to poisoned. */
6125 poison_any_pointers_to_descendents (reg, pkind);
9a2c9579
DM
6126
6127 /* Purge dynamic extents of any base regions in REG and below
6128 (e.g. VLAs and alloca stack regions). */
6129 for (auto iter : m_dynamic_extents)
6130 {
6131 const region *iter_reg = iter.first;
6132 if (iter_reg->descendent_of_p (reg))
6133 unset_dynamic_extents (iter_reg);
6134 }
757bf1df
DM
6135}
6136
808f4dfe
DM
6137/* Implementation of BindingVisitor.
6138 Update the bound svalues for regions below REG to use poisoned
6139 values instead. */
757bf1df 6140
808f4dfe 6141struct bad_pointer_finder
757bf1df 6142{
808f4dfe
DM
6143 bad_pointer_finder (const region *reg, enum poison_kind pkind,
6144 region_model_manager *mgr)
6145 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
6146 {}
757bf1df 6147
808f4dfe
DM
6148 void on_binding (const binding_key *, const svalue *&sval)
6149 {
6150 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
6151 {
6152 const region *ptr_dst = ptr_sval->get_pointee ();
6153 /* Poison ptrs to descendents of REG, but not to REG itself,
6154 otherwise double-free detection doesn't work (since sm-state
6155 for "free" is stored on the original ptr svalue). */
6156 if (ptr_dst->descendent_of_p (m_reg)
6157 && ptr_dst != m_reg)
6158 {
6159 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
6160 sval->get_type ());
6161 ++m_count;
6162 }
6163 }
6164 }
757bf1df 6165
808f4dfe
DM
6166 const region *m_reg;
6167 enum poison_kind m_pkind;
6168 region_model_manager *const m_mgr;
6169 int m_count;
6170};
757bf1df 6171
808f4dfe
DM
6172/* Find any pointers to REG or its descendents; convert them to
6173 poisoned values of kind PKIND.
6174 Return the number of pointers that were poisoned. */
757bf1df 6175
808f4dfe
DM
6176int
6177region_model::poison_any_pointers_to_descendents (const region *reg,
6178 enum poison_kind pkind)
6179{
6180 bad_pointer_finder bv (reg, pkind, m_mgr);
6181 m_store.for_each_binding (bv);
6182 return bv.m_count;
757bf1df
DM
6183}
6184
808f4dfe
DM
6185/* Attempt to merge THIS with OTHER_MODEL, writing the result
6186 to OUT_MODEL. Use POINT to distinguish values created as a
6187 result of merging. */
757bf1df 6188
808f4dfe
DM
6189bool
6190region_model::can_merge_with_p (const region_model &other_model,
6191 const program_point &point,
f573d351
DM
6192 region_model *out_model,
6193 const extrinsic_state *ext_state,
6194 const program_state *state_a,
6195 const program_state *state_b) const
757bf1df 6196{
808f4dfe
DM
6197 gcc_assert (out_model);
6198 gcc_assert (m_mgr == other_model.m_mgr);
6199 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 6200
808f4dfe
DM
6201 if (m_current_frame != other_model.m_current_frame)
6202 return false;
6203 out_model->m_current_frame = m_current_frame;
757bf1df 6204
f573d351
DM
6205 model_merger m (this, &other_model, point, out_model,
6206 ext_state, state_a, state_b);
757bf1df 6207
808f4dfe
DM
6208 if (!store::can_merge_p (&m_store, &other_model.m_store,
6209 &out_model->m_store, m_mgr->get_store_manager (),
6210 &m))
6211 return false;
6212
9a2c9579
DM
6213 if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
6214 &out_model->m_dynamic_extents))
6215 return false;
6216
808f4dfe
DM
6217 /* Merge constraints. */
6218 constraint_manager::merge (*m_constraints,
6219 *other_model.m_constraints,
c710051a 6220 out_model->m_constraints);
757bf1df 6221
841008d3
DM
6222 for (auto iter : m.m_svals_changing_meaning)
6223 out_model->m_constraints->purge_state_involving (iter);
6224
808f4dfe 6225 return true;
757bf1df
DM
6226}
6227
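/* Example (illustrative): when merging the models from the two arms of

     if (flag) x = 3; else x = 4;

   the two stores disagree about "x", so the merged model typically gives
   "x" a non-constant value (an unknown or widening svalue, distinguished
   by POINT), and constraints involving any svalue whose meaning changed
   during the merge are purged above.  */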
6228/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
6229 otherwise. */
6230
6231tree
6232region_model::get_fndecl_for_call (const gcall *call,
6233 region_model_context *ctxt)
6234{
6235 tree fn_ptr = gimple_call_fn (call);
6236 if (fn_ptr == NULL_TREE)
6237 return NULL_TREE;
808f4dfe
DM
6238 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
6239 if (const region_svalue *fn_ptr_ptr
6240 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 6241 {
808f4dfe
DM
6242 const region *reg = fn_ptr_ptr->get_pointee ();
6243 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 6244 {
808f4dfe 6245 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
6246 cgraph_node *node = cgraph_node::get (fn_decl);
6247 if (!node)
6248 return NULL_TREE;
6249 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
6250 if (ultimate_node)
6251 return ultimate_node->decl;
757bf1df
DM
6252 }
6253 }
6254
6255 return NULL_TREE;
6256}
6257
808f4dfe 6258/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 6259
faacafd2 6260struct append_regions_cb_data
757bf1df 6261{
808f4dfe
DM
6262 const region_model *model;
6263 auto_vec<const decl_region *> *out;
6264};
757bf1df 6265
faacafd2 6266/* Populate *OUT with all decl_regions in the current
808f4dfe 6267 frame that have clusters within the store. */
757bf1df
DM
6268
6269void
808f4dfe 6270region_model::
faacafd2 6271get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
757bf1df 6272{
faacafd2 6273 append_regions_cb_data data;
808f4dfe
DM
6274 data.model = this;
6275 data.out = out;
faacafd2 6276 m_store.for_each_cluster (append_regions_cb, &data);
757bf1df
DM
6277}
6278
faacafd2 6279/* Implementation detail of get_regions_for_current_frame. */
757bf1df 6280
808f4dfe 6281void
faacafd2
DM
6282region_model::append_regions_cb (const region *base_reg,
6283 append_regions_cb_data *cb_data)
757bf1df 6284{
808f4dfe
DM
6285 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
6286 return;
6287 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
faacafd2 6288 cb_data->out->safe_push (decl_reg);
757bf1df
DM
6289}
6290
c83e9731
TL
6291
6292/* Abstract class for diagnostics related to the use of
6293 floating-point arithmetic where precision is needed. */
6294
6295class imprecise_floating_point_arithmetic : public pending_diagnostic
6296{
6297public:
6298 int get_controlling_option () const final override
6299 {
6300 return OPT_Wanalyzer_imprecise_fp_arithmetic;
6301 }
6302};
6303
6304/* Concrete diagnostic to complain about uses of floating-point arithmetic
6305 in the size argument of malloc etc. */
6306
6307class float_as_size_arg : public imprecise_floating_point_arithmetic
6308{
6309public:
6310 float_as_size_arg (tree arg) : m_arg (arg)
6311 {}
6312
6313 const char *get_kind () const final override
6314 {
6315 return "float_as_size_arg_diagnostic";
6316 }
6317
ac9230fb 6318 bool subclass_equal_p (const pending_diagnostic &other) const final override
c83e9731
TL
6319 {
6320 return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
6321 }
6322
12b67d1e 6323 bool emit (diagnostic_emission_context &ctxt) final override
c83e9731 6324 {
12b67d1e
DM
6325 bool warned = ctxt.warn ("use of floating-point arithmetic here might"
6326 " yield unexpected results");
c83e9731 6327 if (warned)
12b67d1e
DM
6328 inform (ctxt.get_location (),
6329 "only use operands of an integer type"
6330 " inside the size argument");
c83e9731
TL
6331 return warned;
6332 }
6333
6334 label_text describe_final_event (const evdesc::final_event &ev) final
6335 override
6336 {
6337 if (m_arg)
6338 return ev.formatted_print ("operand %qE is of type %qT",
6339 m_arg, TREE_TYPE (m_arg));
6340 return ev.formatted_print ("at least one operand of the size argument is"
6341 " of a floating-point type");
6342 }
6343
6344private:
6345 tree m_arg;
6346};
6347
6348/* Visitor to find uses of floating-point variables/constants in an svalue. */
6349
6350class contains_floating_point_visitor : public visitor
6351{
6352public:
6353 contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
6354 {
6355 root_sval->accept (this);
6356 }
6357
6358 const svalue *get_svalue_to_report ()
6359 {
6360 return m_result;
6361 }
6362
6363 void visit_constant_svalue (const constant_svalue *sval) final override
6364 {
6365 /* At the point the analyzer runs, constant integer operands in a floating
6366 point expression are already implicitly converted to floating-point.
6367 Thus, we prefer to report non-constants so that the diagnostic
6368 always reports a floating-point operand. */
6369 tree type = sval->get_type ();
6370 if (type && FLOAT_TYPE_P (type) && !m_result)
6371 m_result = sval;
6372 }
6373
6374 void visit_conjured_svalue (const conjured_svalue *sval) final override
6375 {
6376 tree type = sval->get_type ();
6377 if (type && FLOAT_TYPE_P (type))
6378 m_result = sval;
6379 }
6380
6381 void visit_initial_svalue (const initial_svalue *sval) final override
6382 {
6383 tree type = sval->get_type ();
6384 if (type && FLOAT_TYPE_P (type))
6385 m_result = sval;
6386 }
6387
6388private:
6389 /* Non-null if at least one floating-point operand was found. */
6390 const svalue *m_result;
6391};
6392
6393/* May complain about uses of floating-point operands in SIZE_IN_BYTES. */
6394
6395void
6396region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
6397 region_model_context *ctxt) const
6398{
6399 gcc_assert (ctxt);
6400
6401 contains_floating_point_visitor v (size_in_bytes);
6402 if (const svalue *float_sval = v.get_svalue_to_report ())
6403 {
6404 tree diag_arg = get_representative_tree (float_sval);
6341f14e 6405 ctxt->warn (make_unique<float_as_size_arg> (diag_arg));
c83e9731
TL
6406 }
6407}
6408
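/* Example (illustrative): for

     void *p = malloc (n * 1.5);

   the size svalue contains a floating-point operand, so
   -Wanalyzer-imprecise-fp-arithmetic warns that the arithmetic might
   yield unexpected results, and suggests using only integer-typed
   operands inside the size argument.  */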
ce917b04
DM
6409/* Return a region describing a heap-allocated block of memory.
6410 Use CTXT to complain about tainted sizes.
6411
6412 Reuse an existing heap_allocated_region if it's not being referenced by
38c00edd
EF
6413 this region_model; otherwise create a new one.
6414
6415 If UPDATE_STATE_MACHINE is true, also transition the pointer to the
6416 heap_allocated_region from the start state to assumed non-null. */
757bf1df 6417
808f4dfe 6418const region *
ce917b04 6419region_model::get_or_create_region_for_heap_alloc (const svalue *size_in_bytes,
38c00edd
EF
6420 region_model_context *ctxt,
6421 bool update_state_machine,
6422 const call_details *cd)
ce917b04
DM
6423{
6424 /* Determine which regions are referenced in this region_model, so that
6425 we can reuse an existing heap_allocated_region if it's not in use on
6426 this path. */
7dc0ecaf 6427 auto_bitmap base_regs_in_use;
ce917b04 6428 get_referenced_base_regions (base_regs_in_use);
b03a10b0
DM
6429
6430 /* Don't reuse regions that are marked as TOUCHED. */
6431 for (store::cluster_map_t::iterator iter = m_store.begin ();
6432 iter != m_store.end (); ++iter)
6433 if ((*iter).second->touched_p ())
6434 {
6435 const region *base_reg = (*iter).first;
6436 bitmap_set_bit (base_regs_in_use, base_reg->get_id ());
6437 }
6438
ce917b04
DM
6439 const region *reg
6440 = m_mgr->get_or_create_region_for_heap_alloc (base_regs_in_use);
688fc162
DM
6441 if (size_in_bytes)
6442 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
6443 set_dynamic_extents (reg, size_in_bytes, ctxt);
38c00edd
EF
6444
6445 if (update_state_machine && cd)
6446 {
6447 const svalue *ptr_sval
6448 = m_mgr->get_ptr_svalue (cd->get_lhs_type (), reg);
6449 transition_ptr_sval_non_null (ctxt, ptr_sval);
6450 }
6451
808f4dfe 6452 return reg;
757bf1df
DM
6453}
6454
ce917b04
DM
6455/* Populate OUT_IDS with the set of IDs of those base regions which are
6456 reachable in this region_model. */
6457
6458void
7dc0ecaf 6459region_model::get_referenced_base_regions (auto_bitmap &out_ids) const
ce917b04
DM
6460{
6461 reachable_regions reachable_regs (const_cast<region_model *> (this));
6462 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
6463 &reachable_regs);
6464 /* Get regions for locals that have explicitly bound values. */
6465 for (store::cluster_map_t::iterator iter = m_store.begin ();
6466 iter != m_store.end (); ++iter)
6467 {
6468 const region *base_reg = (*iter).first;
6469 if (const region *parent = base_reg->get_parent_region ())
6470 if (parent->get_kind () == RK_FRAME)
6471 reachable_regs.add (base_reg, false);
6472 }
6473
6474 bitmap_clear (out_ids);
6475 for (auto iter_reg : reachable_regs)
6476 bitmap_set_bit (out_ids, iter_reg->get_id ());
6477}
6478
808f4dfe 6479/* Return a new region describing a block of memory allocated within the
b9365b93
DM
6480 current frame.
6481 Use CTXT to complain about tainted sizes. */
757bf1df 6482
808f4dfe 6483const region *
b9365b93
DM
6484region_model::create_region_for_alloca (const svalue *size_in_bytes,
6485 region_model_context *ctxt)
757bf1df 6486{
808f4dfe 6487 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
ea4e3218 6488 if (compat_types_p (size_in_bytes->get_type (), size_type_node))
b9365b93 6489 set_dynamic_extents (reg, size_in_bytes, ctxt);
808f4dfe 6490 return reg;
757bf1df
DM
6491}
6492
b9365b93
DM
6493/* Record that the size of REG is SIZE_IN_BYTES.
6494 Use CTXT to complain about tainted sizes. */
757bf1df
DM
6495
6496void
9a2c9579 6497region_model::set_dynamic_extents (const region *reg,
b9365b93
DM
6498 const svalue *size_in_bytes,
6499 region_model_context *ctxt)
9a2c9579
DM
6500{
6501 assert_compat_types (size_in_bytes->get_type (), size_type_node);
b9365b93 6502 if (ctxt)
c83e9731
TL
6503 {
6504 check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
6505 ctxt);
6506 check_dynamic_size_for_floats (size_in_bytes, ctxt);
6507 }
9a2c9579
DM
6508 m_dynamic_extents.put (reg, size_in_bytes);
6509}
6510
6511 /* Get the recorded dynamic size of REG in bytes, or NULL if no dynamic
6512 size was recorded. */
6513
6514const svalue *
6515region_model::get_dynamic_extents (const region *reg) const
757bf1df 6516{
9a2c9579
DM
6517 if (const svalue * const *slot = m_dynamic_extents.get (reg))
6518 return *slot;
6519 return NULL;
6520}
6521
6522/* Unset any recorded dynamic size of REG. */
6523
6524void
6525region_model::unset_dynamic_extents (const region *reg)
6526{
6527 m_dynamic_extents.remove (reg);
757bf1df
DM
6528}
6529
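/* Usage sketch (hypothetical; "model" and "size_sval" are assumptions
   for illustration):

     const region *reg = model.create_region_for_alloca (size_sval, ctxt);
     const svalue *sz = model.get_dynamic_extents (reg);

   here "sz" is "size_sval", provided its type was compatible with
   size_type_node, and NULL otherwise; the extent is unset again when the
   containing frame is popped (see unbind_region_and_descendents).  */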
c81b60b8
DM
6530/* A subclass of pending_diagnostic for complaining about uninitialized data
6531 being copied across a trust boundary to an untrusted output
6532 (e.g. copy_to_user infoleaks in the Linux kernel). */
6533
6534class exposure_through_uninit_copy
6535 : public pending_diagnostic_subclass<exposure_through_uninit_copy>
6536{
6537public:
6538 exposure_through_uninit_copy (const region *src_region,
6539 const region *dest_region,
ffaeb9dc 6540 const svalue *copied_sval)
c81b60b8
DM
6541 : m_src_region (src_region),
6542 m_dest_region (dest_region),
ffaeb9dc 6543 m_copied_sval (copied_sval)
c81b60b8
DM
6544 {
6545 gcc_assert (m_copied_sval->get_kind () == SK_POISONED
6546 || m_copied_sval->get_kind () == SK_COMPOUND);
6547 }
6548
6549 const char *get_kind () const final override
6550 {
6551 return "exposure_through_uninit_copy";
6552 }
6553
6554 bool operator== (const exposure_through_uninit_copy &other) const
6555 {
6556 return (m_src_region == other.m_src_region
6557 && m_dest_region == other.m_dest_region
6558 && m_copied_sval == other.m_copied_sval);
6559 }
6560
6561 int get_controlling_option () const final override
6562 {
6563 return OPT_Wanalyzer_exposure_through_uninit_copy;
6564 }
6565
12b67d1e 6566 bool emit (diagnostic_emission_context &ctxt) final override
c81b60b8 6567 {
c81b60b8 6568 /* CWE-200: Exposure of Sensitive Information to an Unauthorized Actor. */
12b67d1e 6569 ctxt.add_cwe (200);
c81b60b8
DM
6570 enum memory_space mem_space = get_src_memory_space ();
6571 bool warned;
6572 switch (mem_space)
6573 {
6574 default:
12b67d1e
DM
6575 warned = ctxt.warn ("potential exposure of sensitive information"
6576 " by copying uninitialized data"
6577 " across trust boundary");
c81b60b8
DM
6578 break;
6579 case MEMSPACE_STACK:
12b67d1e
DM
6580 warned = ctxt.warn ("potential exposure of sensitive information"
6581 " by copying uninitialized data from stack"
6582 " across trust boundary");
c81b60b8
DM
6583 break;
6584 case MEMSPACE_HEAP:
12b67d1e
DM
6585 warned = ctxt.warn ("potential exposure of sensitive information"
6586 " by copying uninitialized data from heap"
6587 " across trust boundary");
c81b60b8
DM
6588 break;
6589 }
6590 if (warned)
6591 {
12b67d1e 6592 const location_t loc = ctxt.get_location ();
c81b60b8
DM
6593 inform_number_of_uninit_bits (loc);
6594 complain_about_uninit_ranges (loc);
6595
6596 if (mem_space == MEMSPACE_STACK)
6597 maybe_emit_fixit_hint ();
6598 }
6599 return warned;
6600 }
6601
6602 label_text describe_final_event (const evdesc::final_event &) final override
6603 {
6604 enum memory_space mem_space = get_src_memory_space ();
6605 switch (mem_space)
6606 {
6607 default:
6608 return label_text::borrow ("uninitialized data copied here");
6609
6610 case MEMSPACE_STACK:
6611 return label_text::borrow ("uninitialized data copied from stack here");
6612
6613 case MEMSPACE_HEAP:
6614 return label_text::borrow ("uninitialized data copied from heap here");
6615 }
6616 }
6617
6618 void mark_interesting_stuff (interesting_t *interest) final override
6619 {
6620 if (m_src_region)
6621 interest->add_region_creation (m_src_region);
6622 }
6623
12b67d1e
DM
6624 void
6625 maybe_add_sarif_properties (sarif_object &result_obj) const final override
6626 {
6627 sarif_property_bag &props = result_obj.get_or_create_properties ();
6628#define PROPERTY_PREFIX "gcc/-Wanalyzer-exposure-through-uninit-copy/"
6629 props.set (PROPERTY_PREFIX "src_region", m_src_region->to_json ());
6630 props.set (PROPERTY_PREFIX "dest_region", m_dest_region->to_json ());
6631 props.set (PROPERTY_PREFIX "copied_sval", m_copied_sval->to_json ());
6632#undef PROPERTY_PREFIX
6633 }
6634
6635private:
6636 enum memory_space get_src_memory_space () const
6637 {
6638 return m_src_region ? m_src_region->get_memory_space () : MEMSPACE_UNKNOWN;
6639 }
6640
6641 bit_size_t calc_num_uninit_bits () const
6642 {
6643 switch (m_copied_sval->get_kind ())
6644 {
6645 default:
6646 gcc_unreachable ();
6647 break;
6648 case SK_POISONED:
6649 {
6650 const poisoned_svalue *poisoned_sval
6651 = as_a <const poisoned_svalue *> (m_copied_sval);
6652 gcc_assert (poisoned_sval->get_poison_kind () == POISON_KIND_UNINIT);
6653
6654 /* Give up if we don't have type information. */
6655 if (m_copied_sval->get_type () == NULL_TREE)
6656 return 0;
6657
6658 bit_size_t size_in_bits;
6659 if (int_size_in_bits (m_copied_sval->get_type (), &size_in_bits))
6660 return size_in_bits;
6661
6662 /* Give up if we can't get the size of the type. */
6663 return 0;
6664 }
6665 break;
6666 case SK_COMPOUND:
6667 {
6668 const compound_svalue *compound_sval
6669 = as_a <const compound_svalue *> (m_copied_sval);
6670 bit_size_t result = 0;
6671 /* Find keys for uninit svals. */
6672 for (auto iter : *compound_sval)
6673 {
6674 const svalue *sval = iter.second;
6675 if (const poisoned_svalue *psval
6676 = sval->dyn_cast_poisoned_svalue ())
6677 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6678 {
6679 const binding_key *key = iter.first;
6680 const concrete_binding *ckey
6681 = key->dyn_cast_concrete_binding ();
6682 gcc_assert (ckey);
6683 result += ckey->get_size_in_bits ();
6684 }
6685 }
6686 return result;
6687 }
6688 }
6689 }
6690
6691 void inform_number_of_uninit_bits (location_t loc) const
6692 {
6693 bit_size_t num_uninit_bits = calc_num_uninit_bits ();
6694 if (num_uninit_bits <= 0)
6695 return;
6696 if (num_uninit_bits % BITS_PER_UNIT == 0)
6697 {
6698 /* Express in bytes. */
6699 byte_size_t num_uninit_bytes = num_uninit_bits / BITS_PER_UNIT;
6700 if (num_uninit_bytes == 1)
6701 inform (loc, "1 byte is uninitialized");
6702 else
6703 inform (loc,
6704 "%wu bytes are uninitialized", num_uninit_bytes.to_uhwi ());
6705 }
6706 else
6707 {
6708 /* Express in bits. */
6709 if (num_uninit_bits == 1)
6710 inform (loc, "1 bit is uninitialized");
6711 else
6712 inform (loc,
6713 "%wu bits are uninitialized", num_uninit_bits.to_uhwi ());
6714 }
6715 }
6716
6717 void complain_about_uninit_ranges (location_t loc) const
6718 {
6719 if (const compound_svalue *compound_sval
6720 = m_copied_sval->dyn_cast_compound_svalue ())
6721 {
6722 /* Find keys for uninit svals. */
6723 auto_vec<const concrete_binding *> uninit_keys;
6724 for (auto iter : *compound_sval)
6725 {
6726 const svalue *sval = iter.second;
6727 if (const poisoned_svalue *psval
6728 = sval->dyn_cast_poisoned_svalue ())
6729 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6730 {
6731 const binding_key *key = iter.first;
6732 const concrete_binding *ckey
6733 = key->dyn_cast_concrete_binding ();
6734 gcc_assert (ckey);
6735 uninit_keys.safe_push (ckey);
6736 }
6737 }
6738 /* Complain about them in sorted order. */
6739 uninit_keys.qsort (concrete_binding::cmp_ptr_ptr);
6740
6741 std::unique_ptr<record_layout> layout;
6742
6743 tree type = m_copied_sval->get_type ();
6744 if (type && TREE_CODE (type) == RECORD_TYPE)
6745 {
6746 // (std::make_unique is C++14)
6747 layout = std::unique_ptr<record_layout> (new record_layout (type));
6748
6749 if (0)
6750 layout->dump ();
6751 }
6752
6753 unsigned i;
6754 const concrete_binding *ckey;
6755 FOR_EACH_VEC_ELT (uninit_keys, i, ckey)
6756 {
6757 bit_offset_t start_bit = ckey->get_start_bit_offset ();
6758 bit_offset_t next_bit = ckey->get_next_bit_offset ();
6759 complain_about_uninit_range (loc, start_bit, next_bit,
6760 layout.get ());
6761 }
6762 }
6763 }
6764
6765 void complain_about_uninit_range (location_t loc,
6766 bit_offset_t start_bit,
6767 bit_offset_t next_bit,
6768 const record_layout *layout) const
6769 {
6770 if (layout)
6771 {
6772 while (start_bit < next_bit)
6773 {
6774 if (const record_layout::item *item
6775 = layout->get_item_at (start_bit))
6776 {
6777 gcc_assert (start_bit >= item->get_start_bit_offset ());
6778 gcc_assert (start_bit < item->get_next_bit_offset ());
6779 if (item->get_start_bit_offset () == start_bit
6780 && item->get_next_bit_offset () <= next_bit)
6781 complain_about_fully_uninit_item (*item);
6782 else
6783 complain_about_partially_uninit_item (*item);
6784 start_bit = item->get_next_bit_offset ();
6785 continue;
6786 }
6787 else
6788 break;
6789 }
6790 }
6791
6792 if (start_bit >= next_bit)
6793 return;
6794
6795 if (start_bit % 8 == 0 && next_bit % 8 == 0)
6796 {
6797 /* Express in bytes. */
6798 byte_offset_t start_byte = start_bit / 8;
6799 byte_offset_t last_byte = (next_bit / 8) - 1;
6800 if (last_byte == start_byte)
6801 inform (loc,
6802 "byte %wu is uninitialized",
6803 start_byte.to_uhwi ());
6804 else
6805 inform (loc,
6806 "bytes %wu - %wu are uninitialized",
6807 start_byte.to_uhwi (),
6808 last_byte.to_uhwi ());
6809 }
6810 else
6811 {
6812 /* Express in bits. */
6813 bit_offset_t last_bit = next_bit - 1;
6814 if (last_bit == start_bit)
6815 inform (loc,
6816 "bit %wu is uninitialized",
6817 start_bit.to_uhwi ());
6818 else
6819 inform (loc,
6820 "bits %wu - %wu are uninitialized",
6821 start_bit.to_uhwi (),
6822 last_bit.to_uhwi ());
6823 }
6824 }
6825
6826 static void
6827 complain_about_fully_uninit_item (const record_layout::item &item)
6828 {
6829 tree field = item.m_field;
6830 bit_size_t num_bits = item.m_bit_range.m_size_in_bits;
6831 if (item.m_is_padding)
6832 {
6833 if (num_bits % 8 == 0)
6834 {
6835 /* Express in bytes. */
6836 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6837 if (num_bytes == 1)
6838 inform (DECL_SOURCE_LOCATION (field),
6839 "padding after field %qD is uninitialized (1 byte)",
6840 field);
6841 else
6842 inform (DECL_SOURCE_LOCATION (field),
6843 "padding after field %qD is uninitialized (%wu bytes)",
6844 field, num_bytes.to_uhwi ());
6845 }
6846 else
6847 {
6848 /* Express in bits. */
6849 if (num_bits == 1)
6850 inform (DECL_SOURCE_LOCATION (field),
6851 "padding after field %qD is uninitialized (1 bit)",
6852 field);
6853 else
6854 inform (DECL_SOURCE_LOCATION (field),
6855 "padding after field %qD is uninitialized (%wu bits)",
6856 field, num_bits.to_uhwi ());
6857 }
6858 }
6859 else
6860 {
6861 if (num_bits % 8 == 0)
6862 {
6863 /* Express in bytes. */
6864 byte_size_t num_bytes = num_bits / BITS_PER_UNIT;
6865 if (num_bytes == 1)
6866 inform (DECL_SOURCE_LOCATION (field),
6867 "field %qD is uninitialized (1 byte)", field);
6868 else
6869 inform (DECL_SOURCE_LOCATION (field),
6870 "field %qD is uninitialized (%wu bytes)",
6871 field, num_bytes.to_uhwi ());
6872 }
6873 else
6874 {
6875 /* Express in bits. */
6876 if (num_bits == 1)
6877 inform (DECL_SOURCE_LOCATION (field),
6878 "field %qD is uninitialized (1 bit)", field);
6879 else
6880 inform (DECL_SOURCE_LOCATION (field),
6881 "field %qD is uninitialized (%wu bits)",
6882 field, num_bits.to_uhwi ());
6883 }
6884 }
6885 }
6886
6887 static void
6888 complain_about_partially_uninit_item (const record_layout::item &item)
6889 {
6890 tree field = item.m_field;
6891 if (item.m_is_padding)
6892 inform (DECL_SOURCE_LOCATION (field),
6893 "padding after field %qD is partially uninitialized",
6894 field);
6895 else
6896 inform (DECL_SOURCE_LOCATION (field),
6897 "field %qD is partially uninitialized",
6898 field);
6899 /* TODO: ideally we'd describe what parts are uninitialized. */
6900 }
6901
6902 void maybe_emit_fixit_hint () const
6903 {
6904 if (tree decl = m_src_region->maybe_get_decl ())
6905 {
6906 gcc_rich_location hint_richloc (DECL_SOURCE_LOCATION (decl));
6907 hint_richloc.add_fixit_insert_after (" = {0}");
6908 inform (&hint_richloc,
6909 "suggest forcing zero-initialization by"
6910 " providing a %<{0}%> initializer");
6911 }
6912 }
6913
6914private:
6915 const region *m_src_region;
6916 const region *m_dest_region;
6917 const svalue *m_copied_sval;
6918};
6919
6920/* Return true if any part of SVAL is uninitialized. */
6921
6922static bool
6923contains_uninit_p (const svalue *sval)
6924{
6925 switch (sval->get_kind ())
6926 {
6927 default:
6928 return false;
6929 case SK_POISONED:
6930 {
6931 const poisoned_svalue *psval
6932 = as_a <const poisoned_svalue *> (sval);
6933 return psval->get_poison_kind () == POISON_KIND_UNINIT;
6934 }
6935 case SK_COMPOUND:
6936 {
6937 const compound_svalue *compound_sval
6938 = as_a <const compound_svalue *> (sval);
6939
6940 for (auto iter : *compound_sval)
6941 {
6942 const svalue *sval = iter.second;
6943 if (const poisoned_svalue *psval
6944 = sval->dyn_cast_poisoned_svalue ())
6945 if (psval->get_poison_kind () == POISON_KIND_UNINIT)
6946 return true;
6947 }
6948
6949 return false;
6950 }
6951 }
6952}
6953
6954/* Function for use by plugins when simulating writing data through a
6955 pointer to an "untrusted" region DST_REG (and thus crossing a security
6956 boundary), such as copying data to user space in an OS kernel.
6957
6958 Check that COPIED_SVAL is fully initialized. If not, complain about
6959 an infoleak to CTXT.
6960
6961 SRC_REG can be NULL; if non-NULL it is used as a hint in the diagnostic
6962 as to where COPIED_SVAL came from. */
6963
6964void
6965region_model::maybe_complain_about_infoleak (const region *dst_reg,
6966 const svalue *copied_sval,
6967 const region *src_reg,
6968 region_model_context *ctxt)
6969{
6970 /* Check for exposure. */
6971 if (contains_uninit_p (copied_sval))
6972 ctxt->warn (make_unique<exposure_through_uninit_copy> (src_reg,
6973 dst_reg,
6974 copied_sval));
6975}
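
/* A minimal sketch (with a hypothetical class name, and ignoring the
   size argument for brevity) of how a plugin's known_function for a
   copy_to_user-like primitive might drive the check above:

       void
       kf_copy_to_user::impl_call_pre (const call_details &cd) const
       {
         region_model *model = cd.get_model ();
         region_model_context *ctxt = cd.get_ctxt ();
         const region *dest_reg
           = model->deref_rvalue (cd.get_arg_svalue (0),
                                  cd.get_arg_tree (0), ctxt);
         const region *src_reg
           = model->deref_rvalue (cd.get_arg_svalue (1),
                                  cd.get_arg_tree (1), ctxt);
         const svalue *copied_sval
           = model->get_store_value (src_reg, ctxt);
         model->maybe_complain_about_infoleak (dest_reg, copied_sval,
                                               src_reg, ctxt);
       }  */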
6976
6977/* Set errno to a positive symbolic int, as if some error has occurred. */
6978
6979void
6980region_model::set_errno (const call_details &cd)
6981{
6982 const region *errno_reg = m_mgr->get_errno_region ();
6983 conjured_purge p (this, cd.get_ctxt ());
6984 const svalue *new_errno_sval
6985 = m_mgr->get_or_create_conjured_svalue (integer_type_node,
6986 cd.get_call_stmt (),
6987 errno_reg, p);
6988 const svalue *zero
6989 = m_mgr->get_or_create_int_cst (integer_type_node, 0);
6990 add_constraint (new_errno_sval, GT_EXPR, zero, cd.get_ctxt ());
6991 set_value (errno_reg, new_errno_sval, cd.get_ctxt ());
6992}
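
/* For example, a known_function modeling a call with a failure path
   might use this roughly as follows (a sketch, not a real handler;
   "on_failure_path" is hypothetical):

       if (on_failure_path)
         cd.get_model ()->set_errno (cd);

   leaving errno bound to a conjured value constrained to be > 0, so
   that subsequent "errno > 0" checks in the caller hold.  */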
6993
6994/* class noop_region_model_context : public region_model_context. */
6995
6996void
6997noop_region_model_context::add_note (std::unique_ptr<pending_note>)
6998{
6999}
7000
7001void
7002noop_region_model_context::add_event (std::unique_ptr<checker_event>)
7003{
7004}
7005
7006void
7007noop_region_model_context::bifurcate (std::unique_ptr<custom_edge_info>)
7008{
7009}
7010
7011void
7012noop_region_model_context::terminate_path ()
7013{
7014}
7015
7016/* class region_model_context_decorator : public region_model_context. */
7017
7018void
7019region_model_context_decorator::add_event (std::unique_ptr<checker_event> event)
7020{
7021 if (m_inner)
7022 m_inner->add_event (std::move (event));
7023}
7024
7025/* struct model_merger. */
7026
7027/* Dump a multiline representation of this merger to PP. */
7028
7029void
7030model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
7031{
7032 pp_string (pp, "model A:");
7033 pp_newline (pp);
7034 m_model_a->dump_to_pp (pp, simple, true);
7035 pp_newline (pp);
7036
7037 pp_string (pp, "model B:");
7038 pp_newline (pp);
7039 m_model_b->dump_to_pp (pp, simple, true);
7040 pp_newline (pp);
7041
7042 pp_string (pp, "merged model:");
7043 pp_newline (pp);
7044 m_merged_model->dump_to_pp (pp, simple, true);
7045 pp_newline (pp);
7046}
7047
7048/* Dump a multiline representation of this merger to FILE. */
7049
7050void
7051model_merger::dump (FILE *fp, bool simple) const
7052{
7053 pretty_printer pp;
7054 pp_format_decoder (&pp) = default_tree_printer;
7055 pp_show_color (&pp) = pp_show_color (global_dc->printer);
7056 pp.buffer->stream = fp;
7057 dump_to_pp (&pp, simple);
7058 pp_flush (&pp);
7059}
7060
7061/* Dump a multiline representation of this merger to stderr. */
7062
7063DEBUG_FUNCTION void
7064model_merger::dump (bool simple) const
7065{
7066 dump (stderr, simple);
7067}
7068
7069/* Return true if it's OK to merge SVAL with other svalues. */
7070
7071bool
7072model_merger::mergeable_svalue_p (const svalue *sval) const
7073{
7074 if (m_ext_state)
7075 {
7076 /* Reject merging svalues that have non-purgable sm-state,
7077 to avoid falsely reporting memory leaks by merging them
7078 with something else. For example, given a local var "p",
7079 reject the merger of a:
7080 store_a mapping "p" to a malloc-ed ptr
7081 with:
7082 store_b mapping "p" to a NULL ptr. */
7083 if (m_state_a)
7084 if (!m_state_a->can_purge_p (*m_ext_state, sval))
7085 return false;
7086 if (m_state_b)
7087 if (!m_state_b->can_purge_p (*m_ext_state, sval))
7088 return false;
7089 }
7090 return true;
7091}
7092
7093/* Mark WIDENING_SVAL as changing meaning during the merge. */
7094
7095void
7096model_merger::on_widening_reuse (const widening_svalue *widening_sval)
7097{
7098 m_svals_changing_meaning.add (widening_sval);
7099}
7100
7101} // namespace ana
7102
7103/* Dump RMODEL fully to stderr (i.e. without summarization). */
7104
7105DEBUG_FUNCTION void
7106debug (const region_model &rmodel)
7107{
7108 rmodel.dump (false);
7109}
7110
7111/* class rejected_op_constraint : public rejected_constraint. */
7112
7113void
7114rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
7115{
7116 region_model m (m_model);
7117 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
7118 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
7119 lhs_sval->dump_to_pp (pp, true);
7120 pp_printf (pp, " %s ", op_symbol_code (m_op));
7121 rhs_sval->dump_to_pp (pp, true);
7122}
7123
7124/* class rejected_default_case : public rejected_constraint. */
7125
7126void
7127rejected_default_case::dump_to_pp (pretty_printer *pp) const
7128{
7129 pp_string (pp, "implicit default for enum");
7130}
7131
7132/* class rejected_ranges_constraint : public rejected_constraint. */
7133
7134void
7135rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
7136{
7137 region_model m (m_model);
7138 const svalue *sval = m.get_rvalue (m_expr, NULL);
7139 sval->dump_to_pp (pp, true);
7140 pp_string (pp, " in ");
7141 m_ranges->dump_to_pp (pp, true);
7142}
7143
7144/* class engine. */
7145
7146/* engine's ctor. */
7147
7148engine::engine (const supergraph *sg, logger *logger)
7149: m_sg (sg), m_mgr (logger)
7150{
7151}
7152
7153/* Dump the managed objects by class to LOGGER, and the per-class totals. */
7154
7155void
7156engine::log_stats (logger *logger) const
7157{
7158 m_mgr.log_stats (logger, true);
7159}
7160
7161namespace ana {
7162
7163#if CHECKING_P
7164
7165namespace selftest {
7166
7167/* Build a constant tree of the given type from STR. */
7168
7169static tree
7170build_real_cst_from_string (tree type, const char *str)
7171{
7172 REAL_VALUE_TYPE real;
7173 real_from_string (&real, str);
7174 return build_real (type, real);
7175}
7176
7177/* Append various "interesting" constants to OUT (e.g. NaN). */
7178
7179static void
7180append_interesting_constants (auto_vec<tree> *out)
7181{
7182 out->safe_push (integer_zero_node);
7183 out->safe_push (build_int_cst (integer_type_node, 42));
7184 out->safe_push (build_int_cst (unsigned_type_node, 0));
7185 out->safe_push (build_int_cst (unsigned_type_node, 42));
7186 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
7187 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
7188 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
7189 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
7190 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
7191 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
7192 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
7193 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
7194}
7195
7196/* Verify that tree_cmp is a well-behaved comparator for qsort, even
7197 if the underlying constants aren't comparable. */
7198
7199static void
7200test_tree_cmp_on_constants ()
7201{
7202 auto_vec<tree> csts;
7203 append_interesting_constants (&csts);
7204
7205 /* Try sorting every triple. */
7206 const unsigned num = csts.length ();
7207 for (unsigned i = 0; i < num; i++)
7208 for (unsigned j = 0; j < num; j++)
7209 for (unsigned k = 0; k < num; k++)
7210 {
7211 auto_vec<tree> v (3);
7212 v.quick_push (csts[i]);
7213 v.quick_push (csts[j]);
7214 v.quick_push (csts[k]);
7215 v.qsort (tree_cmp);
7216 }
7217}
7218
7219/* Implementation detail of the ASSERT_CONDITION_* macros. */
7220
7221void
7222assert_condition (const location &loc,
7223 region_model &model,
7224 const svalue *lhs, tree_code op, const svalue *rhs,
7225 tristate expected)
7226{
7227 tristate actual = model.eval_condition (lhs, op, rhs);
7228 ASSERT_EQ_AT (loc, actual, expected);
7229}
7230
7231/* Implementation detail of the ASSERT_CONDITION_* macros. */
7232
7233void
7234assert_condition (const location &loc,
7235 region_model &model,
7236 tree lhs, tree_code op, tree rhs,
7237 tristate expected)
7238{
7239 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
7240 ASSERT_EQ_AT (loc, actual, expected);
7241}
7242
7243/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
7244
7245static void
7246assert_dump_tree_eq (const location &loc, tree t, const char *expected)
7247{
7248 auto_fix_quotes sentinel;
7249 pretty_printer pp;
7250 pp_format_decoder (&pp) = default_tree_printer;
7251 dump_tree (&pp, t);
7252 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
7253}
7254
7255/* Assert that dump_tree (T) is EXPECTED. */
7256
7257#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
7258 SELFTEST_BEGIN_STMT \
7259 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
7260 SELFTEST_END_STMT
7261
7262/* Implementation detail of ASSERT_DUMP_EQ. */
7263
7264static void
7265assert_dump_eq (const location &loc,
7266 const region_model &model,
7267 bool summarize,
7268 const char *expected)
7269{
7270 auto_fix_quotes sentinel;
7271 pretty_printer pp;
7272 pp_format_decoder (&pp) = default_tree_printer;
7273
7274 model.dump_to_pp (&pp, summarize, true);
7275 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
7276}
7277
7278/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
7279
7280#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
7281 SELFTEST_BEGIN_STMT \
7282 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
7283 SELFTEST_END_STMT
7284
7285/* Smoketest for region_model::dump_to_pp. */
7286
7287static void
7288test_dump ()
7289{
7290 region_model_manager mgr;
7291 region_model model (&mgr);
7292
7293 ASSERT_DUMP_EQ (model, false,
7294 "stack depth: 0\n"
7295 "m_called_unknown_fn: FALSE\n"
7296 "constraint_manager:\n"
7297 " equiv classes:\n"
7298 " constraints:\n");
7299 ASSERT_DUMP_EQ (model, true,
7300 "stack depth: 0\n"
7301 "m_called_unknown_fn: FALSE\n"
7302 "constraint_manager:\n"
7303 " equiv classes:\n"
7304 " constraints:\n");
7305}
7306
7307/* Helper function for selftests. Create a struct or union type named NAME,
7308 with the fields given by the FIELD_DECLS in FIELDS.
7309 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
7310 create a UNION_TYPE. */
7311
7312static tree
7313make_test_compound_type (const char *name, bool is_struct,
7314 const auto_vec<tree> *fields)
7315{
7316 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
7317 TYPE_NAME (t) = get_identifier (name);
7318 TYPE_SIZE (t) = 0;
7319
7320 tree fieldlist = NULL;
7321 int i;
7322 tree field;
7323 FOR_EACH_VEC_ELT (*fields, i, field)
7324 {
7325 gcc_assert (TREE_CODE (field) == FIELD_DECL);
7326 DECL_CONTEXT (field) = t;
7327 fieldlist = chainon (field, fieldlist);
7328 }
7329 fieldlist = nreverse (fieldlist);
7330 TYPE_FIELDS (t) = fieldlist;
7331
7332 layout_type (t);
7333 return t;
7334}
7335
7336/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
7337
7338struct coord_test
7339{
7340 coord_test ()
7341 {
7342 auto_vec<tree> fields;
7343 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
7344 get_identifier ("x"), integer_type_node);
7345 fields.safe_push (m_x_field);
7346 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
7347 get_identifier ("y"), integer_type_node);
7348 fields.safe_push (m_y_field);
7349 m_coord_type = make_test_compound_type ("coord", true, &fields);
7350 }
7351
7352 tree m_x_field;
7353 tree m_y_field;
7354 tree m_coord_type;
7355};
7356
7357/* Verify usage of a struct. */
7358
7359static void
7360test_struct ()
7361{
7362 coord_test ct;
7363
7364 tree c = build_global_decl ("c", ct.m_coord_type);
7365 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7366 c, ct.m_x_field, NULL_TREE);
7367 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7368 c, ct.m_y_field, NULL_TREE);
7369
7370 tree int_17 = build_int_cst (integer_type_node, 17);
7371 tree int_m3 = build_int_cst (integer_type_node, -3);
7372
7373 region_model_manager mgr;
7374 region_model model (&mgr);
7375 model.set_value (c_x, int_17, NULL);
7376 model.set_value (c_y, int_m3, NULL);
7377
7378 /* Verify get_offset for "c.x". */
7379 {
7380 const region *c_x_reg = model.get_lvalue (c_x, NULL);
7381 region_offset offset = c_x_reg->get_offset (&mgr);
7382 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
7383 ASSERT_EQ (offset.get_bit_offset (), 0);
7384 }
7385
7386 /* Verify get_offset for "c.y". */
7387 {
7388 const region *c_y_reg = model.get_lvalue (c_y, NULL);
7389 region_offset offset = c_y_reg->get_offset (&mgr);
7390 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
7391 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
7392 }
7393}
7394
7395/* Verify usage of an array element. */
7396
7397static void
7398test_array_1 ()
7399{
7400 tree tlen = size_int (10);
7401 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
7402
7403 tree a = build_global_decl ("a", arr_type);
7404
7405 region_model_manager mgr;
7406 region_model model (&mgr);
7407 tree int_0 = integer_zero_node;
7408 tree a_0 = build4 (ARRAY_REF, char_type_node,
7409 a, int_0, NULL_TREE, NULL_TREE);
7410 tree char_A = build_int_cst (char_type_node, 'A');
7411 model.set_value (a_0, char_A, NULL);
7412}
7413
7414/* Verify that region_model::get_representative_tree works as expected. */
7415
7416static void
7417test_get_representative_tree ()
7418{
7419 region_model_manager mgr;
7420
7421 /* STRING_CST. */
7422 {
7423 tree string_cst = build_string (4, "foo");
7424 region_model m (&mgr);
7425 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
7426 tree rep = m.get_representative_tree (str_sval);
7427 ASSERT_EQ (rep, string_cst);
7428 }
7429
7430 /* String literal. */
7431 {
7432 tree string_cst_ptr = build_string_literal (4, "foo");
7433 region_model m (&mgr);
7434 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
7435 tree rep = m.get_representative_tree (str_sval);
7436 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
7437 }
7438
7439 /* Value of an element within an array. */
7440 {
7441 tree tlen = size_int (10);
7442 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
7443 tree a = build_global_decl ("a", arr_type);
7444 placeholder_svalue test_sval (mgr.alloc_symbol_id (),
7445 char_type_node, "test value");
7446
7447 /* Value of a[3]. */
7448 {
7449 test_region_model_context ctxt;
7450 region_model model (&mgr);
7451 tree int_3 = build_int_cst (integer_type_node, 3);
7452 tree a_3 = build4 (ARRAY_REF, char_type_node,
7453 a, int_3, NULL_TREE, NULL_TREE);
7454 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
7455 model.set_value (a_3_reg, &test_sval, &ctxt);
7456 tree rep = model.get_representative_tree (&test_sval);
7457 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
7458 }
7459
7460 /* Value of a[0]. */
7461 {
7462 test_region_model_context ctxt;
7463 region_model model (&mgr);
7464 tree idx = integer_zero_node;
7465 tree a_0 = build4 (ARRAY_REF, char_type_node,
7466 a, idx, NULL_TREE, NULL_TREE);
7467 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
7468 model.set_value (a_0_reg, &test_sval, &ctxt);
7469 tree rep = model.get_representative_tree (&test_sval);
7470 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
7471 }
7472 }
7473
7474 /* Value of a field within a struct. */
7475 {
7476 coord_test ct;
7477
7478 tree c = build_global_decl ("c", ct.m_coord_type);
7479 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7480 c, ct.m_x_field, NULL_TREE);
7481 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7482 c, ct.m_y_field, NULL_TREE);
7483
7484 test_region_model_context ctxt;
7485
7486 /* Value of initial field. */
7487 {
7488 region_model m (&mgr);
7489 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
7490 placeholder_svalue test_sval_x (mgr.alloc_symbol_id (),
7491 integer_type_node, "test x val");
7492 m.set_value (c_x_reg, &test_sval_x, &ctxt);
7493 tree rep = m.get_representative_tree (&test_sval_x);
7494 ASSERT_DUMP_TREE_EQ (rep, "c.x");
7495 }
7496
7497 /* Value of non-initial field. */
7498 {
7499 region_model m (&mgr);
7500 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
7501 placeholder_svalue test_sval_y (mgr.alloc_symbol_id (),
7502 integer_type_node, "test y val");
7503 m.set_value (c_y_reg, &test_sval_y, &ctxt);
7504 tree rep = m.get_representative_tree (&test_sval_y);
7505 ASSERT_DUMP_TREE_EQ (rep, "c.y");
7506 }
7507 }
7508}
7509
7510/* Verify that calling region_model::get_rvalue repeatedly on the same
7511 tree constant retrieves the same svalue *. */
7512
7513static void
7514test_unique_constants ()
7515{
7516 tree int_0 = integer_zero_node;
7517 tree int_42 = build_int_cst (integer_type_node, 42);
7518
7519 test_region_model_context ctxt;
7520 region_model_manager mgr;
7521 region_model model (&mgr);
7522 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
7523 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
7524 model.get_rvalue (int_42, &ctxt));
7525 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
7526 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
7527
7528 /* A "(const int)42" will be a different tree from "(int)42"... */
7529 tree const_int_type_node
7530 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
7531 tree const_int_42 = build_int_cst (const_int_type_node, 42);
7532 ASSERT_NE (int_42, const_int_42);
7533 /* It should have a different const_svalue. */
7534 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
7535 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
7536 ASSERT_NE (int_42_sval, const_int_42_sval);
7537 /* But they should compare as equal. */
7538 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
7539 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
7540}
7541
7542/* Verify that each type gets its own singleton unknown_svalue within a
7543 region_model_manager, and that NULL_TREE gets its own singleton. */
7544
7545static void
808f4dfe 7546test_unique_unknowns ()
757bf1df 7547{
7548 region_model_manager mgr;
7549 const svalue *unknown_int
7550 = mgr.get_or_create_unknown_svalue (integer_type_node);
7551 /* Repeated calls with the same type should get the same "unknown"
7552 svalue. */
7553 const svalue *unknown_int_2
7554 = mgr.get_or_create_unknown_svalue (integer_type_node);
7555 ASSERT_EQ (unknown_int, unknown_int_2);
7556
7557 /* Different types (or the NULL type) should have different
7558 unknown_svalues. */
7559 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
7560 ASSERT_NE (unknown_NULL_type, unknown_int);
7561
7562 /* Repeated calls with NULL for the type should get the same "unknown"
7563 svalue. */
7564 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
7565 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
7566}
7567
7568/* Verify that initial_svalues are handled as expected. */
7569
7570static void
7571test_initial_svalue_folding ()
7572{
7573 region_model_manager mgr;
7574 tree x = build_global_decl ("x", integer_type_node);
7575 tree y = build_global_decl ("y", integer_type_node);
7576
7577 test_region_model_context ctxt;
7578 region_model model (&mgr);
7579 const svalue *x_init = model.get_rvalue (x, &ctxt);
7580 const svalue *y_init = model.get_rvalue (y, &ctxt);
7581 ASSERT_NE (x_init, y_init);
7582 const region *x_reg = model.get_lvalue (x, &ctxt);
7583 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
7584
7585}
7586
7587/* Verify that unary ops are folded as expected. */
7588
7589static void
7590test_unaryop_svalue_folding ()
7591{
7592 region_model_manager mgr;
7593 tree x = build_global_decl ("x", integer_type_node);
7594 tree y = build_global_decl ("y", integer_type_node);
7595
7596 test_region_model_context ctxt;
7597 region_model model (&mgr);
7598 const svalue *x_init = model.get_rvalue (x, &ctxt);
7599 const svalue *y_init = model.get_rvalue (y, &ctxt);
7600 const region *x_reg = model.get_lvalue (x, &ctxt);
7601 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
7602
7603 /* "(int)x" -> "x". */
7604 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
7605
7606 /* "(void *)x" -> something other than "x". */
7607 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
7608
7609 /* "!(x == y)" -> "x != y". */
7610 ASSERT_EQ (mgr.get_or_create_unaryop
7611 (boolean_type_node, TRUTH_NOT_EXPR,
7612 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
7613 x_init, y_init)),
7614 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
7615 x_init, y_init));
7616 /* "!(x > y)" -> "x <= y". */
7617 ASSERT_EQ (mgr.get_or_create_unaryop
7618 (boolean_type_node, TRUTH_NOT_EXPR,
7619 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
7620 x_init, y_init)),
7621 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
7622 x_init, y_init));
7623}
7624
7625/* Verify that binops on constant svalues are folded. */
7626
7627static void
7628test_binop_svalue_folding ()
7629{
7630#define NUM_CSTS 10
7631 tree cst_int[NUM_CSTS];
7632 region_model_manager mgr;
7633 const svalue *cst_sval[NUM_CSTS];
7634 for (int i = 0; i < NUM_CSTS; i++)
7635 {
7636 cst_int[i] = build_int_cst (integer_type_node, i);
7637 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
7638 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
7639 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
7640 }
7641
7642 for (int i = 0; i < NUM_CSTS; i++)
7643 for (int j = 0; j < NUM_CSTS; j++)
7644 {
7645 if (i != j)
7646 ASSERT_NE (cst_sval[i], cst_sval[j]);
7647 if (i + j < NUM_CSTS)
7648 {
7649 const svalue *sum
7650 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7651 cst_sval[i], cst_sval[j]);
7652 ASSERT_EQ (sum, cst_sval[i + j]);
7653 }
7654 if (i - j >= 0)
7655 {
7656 const svalue *difference
7657 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
7658 cst_sval[i], cst_sval[j]);
7659 ASSERT_EQ (difference, cst_sval[i - j]);
7660 }
7661 if (i * j < NUM_CSTS)
7662 {
7663 const svalue *product
7664 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7665 cst_sval[i], cst_sval[j]);
7666 ASSERT_EQ (product, cst_sval[i * j]);
7667 }
7668 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
7669 cst_sval[i], cst_sval[j]);
7670 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
7671 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
7672 cst_sval[i], cst_sval[j]);
7673 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
7674 // etc
7675 }
7676
7677 tree x = build_global_decl ("x", integer_type_node);
7678
7679 test_region_model_context ctxt;
7680 region_model model (&mgr);
7681 const svalue *x_init = model.get_rvalue (x, &ctxt);
7682
7683 /* PLUS_EXPR folding. */
7684 const svalue *x_init_plus_zero
7685 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7686 x_init, cst_sval[0]);
7687 ASSERT_EQ (x_init_plus_zero, x_init);
7688 const svalue *zero_plus_x_init
7689 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7690 cst_sval[0], x_init);
7691 ASSERT_EQ (zero_plus_x_init, x_init);
7692
7693 /* MULT_EXPR folding. */
7694 const svalue *x_init_times_zero
7695 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7696 x_init, cst_sval[0]);
7697 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
7698 const svalue *zero_times_x_init
7699 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7700 cst_sval[0], x_init);
7701 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
7702
7703 const svalue *x_init_times_one
7704 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7705 x_init, cst_sval[1]);
7706 ASSERT_EQ (x_init_times_one, x_init);
7707 const svalue *one_times_x_init
7708 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7709 cst_sval[1], x_init);
7710 ASSERT_EQ (one_times_x_init, x_init);
7711
7712 // etc
7713 // TODO: do we want to use the match-and-simplify DSL for this?
7714
7715 /* Verify that binops put any constants on the RHS. */
7716 const svalue *four_times_x_init
7717 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7718 cst_sval[4], x_init);
7719 const svalue *x_init_times_four
7720 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
7721 x_init, cst_sval[4]);
7722 ASSERT_EQ (four_times_x_init, x_init_times_four);
7723 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
7724 ASSERT_EQ (binop->get_op (), MULT_EXPR);
7725 ASSERT_EQ (binop->get_arg0 (), x_init);
7726 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
7727
7728 /* Verify that ((x + 1) + 1) == (x + 2). */
7729 const svalue *x_init_plus_one
7730 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7731 x_init, cst_sval[1]);
7732 const svalue *x_init_plus_two
7733 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7734 x_init, cst_sval[2]);
7735 const svalue *x_init_plus_one_plus_one
7736 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
7737 x_init_plus_one, cst_sval[1]);
7738 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
7739
7740 /* Verify various binops on booleans. */
7741 {
7742 const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
7743 const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
7744 const svalue *sval_unknown
7745 = mgr.get_or_create_unknown_svalue (boolean_type_node);
7746 const placeholder_svalue sval_placeholder (mgr.alloc_symbol_id (),
7747 boolean_type_node, "v");
7748 for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
7749 {
7750 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7751 sval_true, sval_unknown),
7752 sval_true);
7753 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7754 sval_false, sval_unknown),
7755 sval_unknown);
7756 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7757 sval_false, &sval_placeholder),
7758 &sval_placeholder);
7759 }
7760 for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
7761 {
7762 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7763 sval_false, sval_unknown),
7764 sval_false);
7765 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7766 sval_true, sval_unknown),
7767 sval_unknown);
7768 ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
7769 sval_true, &sval_placeholder),
7770 &sval_placeholder);
7771 }
7772 }
7773}
7774
7775/* Verify that sub_svalues are folded as expected. */
7776
7777static void
7778test_sub_svalue_folding ()
7779{
7780 coord_test ct;
7781 tree c = build_global_decl ("c", ct.m_coord_type);
7782 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7783 c, ct.m_x_field, NULL_TREE);
7784
7785 region_model_manager mgr;
7786 region_model model (&mgr);
7787 test_region_model_context ctxt;
7788 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
7789
7790 /* Verify that sub_svalue of "unknown" simply
7791 yields an unknown. */
7792
7793 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
7794 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
7795 unknown, c_x_reg);
7796 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
7797 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
7798}
7799
7800/* Get BIT within VAL as a symbolic value within MGR. */
7801
7802static const svalue *
7803get_bit (region_model_manager *mgr,
7804 bit_offset_t bit,
7805 unsigned HOST_WIDE_INT val)
7806{
7807 const svalue *inner_svalue
7808 = mgr->get_or_create_int_cst (unsigned_type_node, val);
7809 return mgr->get_or_create_bits_within (boolean_type_node,
7810 bit_range (bit, 1),
7811 inner_svalue);
7812}
7813
7814/* Verify that bits_within_svalues are folded as expected. */
7815
7816static void
7817test_bits_within_svalue_folding ()
7818{
7819 region_model_manager mgr;
7820
7821 const svalue *zero = mgr.get_or_create_int_cst (boolean_type_node, 0);
7822 const svalue *one = mgr.get_or_create_int_cst (boolean_type_node, 1);
7823
7824 {
7825 const unsigned val = 0x0000;
7826 for (unsigned bit = 0; bit < 16; bit++)
7827 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7828 }
7829
7830 {
7831 const unsigned val = 0x0001;
7832 ASSERT_EQ (get_bit (&mgr, 0, val), one);
7833 for (unsigned bit = 1; bit < 16; bit++)
7834 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7835 }
7836
7837 {
7838 const unsigned val = 0x8000;
7839 for (unsigned bit = 0; bit < 15; bit++)
7840 ASSERT_EQ (get_bit (&mgr, bit, val), zero);
7841 ASSERT_EQ (get_bit (&mgr, 15, val), one);
7842 }
7843
7844 {
7845 const unsigned val = 0xFFFF;
7846 for (unsigned bit = 0; bit < 16; bit++)
7847 ASSERT_EQ (get_bit (&mgr, bit, val), one);
7848 }
7849}
7850
7851/* Test that region::descendent_of_p works as expected. */
7852
7853static void
7854test_descendent_of_p ()
7855{
7856 region_model_manager mgr;
7857 const region *stack = mgr.get_stack_region ();
7858 const region *heap = mgr.get_heap_region ();
7859 const region *code = mgr.get_code_region ();
7860 const region *globals = mgr.get_globals_region ();
7861
7862 /* descendent_of_p should return true when used on the region itself. */
7863 ASSERT_TRUE (stack->descendent_of_p (stack));
7864 ASSERT_FALSE (stack->descendent_of_p (heap));
7865 ASSERT_FALSE (stack->descendent_of_p (code));
7866 ASSERT_FALSE (stack->descendent_of_p (globals));
7867
7868 tree x = build_global_decl ("x", integer_type_node);
7869 const region *x_reg = mgr.get_region_for_global (x);
7870 ASSERT_TRUE (x_reg->descendent_of_p (globals));
7871
7872 /* A cast_region should be a descendent of the original region. */
7873 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
7874 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
7875}
7876
7877/* Verify that bit_range_region works as expected. */
7878
7879static void
7880test_bit_range_regions ()
7881{
7882 tree x = build_global_decl ("x", integer_type_node);
7883 region_model_manager mgr;
7884 const region *x_reg = mgr.get_region_for_global (x);
7885 const region *byte0
7886 = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
7887 const region *byte1
7888 = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
7889 ASSERT_TRUE (byte0->descendent_of_p (x_reg));
7890 ASSERT_TRUE (byte1->descendent_of_p (x_reg));
7891 ASSERT_NE (byte0, byte1);
7892}
7893
7894/* Verify that simple assignments work as expected. */
7895
7896static void
7897test_assignment ()
7898{
7899 tree int_0 = integer_zero_node;
7900 tree x = build_global_decl ("x", integer_type_node);
7901 tree y = build_global_decl ("y", integer_type_node);
7902
7903 /* "x == 0", then use of y, then "y = 0;". */
7904 region_model_manager mgr;
7905 region_model model (&mgr);
7906 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
7907 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
7908 model.set_value (model.get_lvalue (y, NULL),
7909 model.get_rvalue (int_0, NULL),
7910 NULL);
7911 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
7912 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
7913}
7914
7915/* Verify that compound assignments work as expected. */
7916
7917static void
7918test_compound_assignment ()
7919{
7920 coord_test ct;
7921
7922 tree c = build_global_decl ("c", ct.m_coord_type);
7923 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7924 c, ct.m_x_field, NULL_TREE);
7925 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7926 c, ct.m_y_field, NULL_TREE);
7927 tree d = build_global_decl ("d", ct.m_coord_type);
7928 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
7929 d, ct.m_x_field, NULL_TREE);
7930 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
7931 d, ct.m_y_field, NULL_TREE);
7932
7933 tree int_17 = build_int_cst (integer_type_node, 17);
7934 tree int_m3 = build_int_cst (integer_type_node, -3);
7935
7936 region_model_manager mgr;
7937 region_model model (&mgr);
7938 model.set_value (c_x, int_17, NULL);
7939 model.set_value (c_y, int_m3, NULL);
7940
7941 /* Copy c to d. */
7942 const svalue *sval = model.get_rvalue (c, NULL);
7943 model.set_value (model.get_lvalue (d, NULL), sval, NULL);
7944
7945 /* Check that the fields have the same svalues. */
7946 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
7947 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
7948}
7949
7950/* Verify the details of pushing and popping stack frames. */
7951
7952static void
7953test_stack_frames ()
7954{
7955 tree int_42 = build_int_cst (integer_type_node, 42);
7956 tree int_10 = build_int_cst (integer_type_node, 10);
7957 tree int_5 = build_int_cst (integer_type_node, 5);
7958 tree int_0 = integer_zero_node;
7959
7960 auto_vec <tree> param_types;
7961 tree parent_fndecl = make_fndecl (integer_type_node,
7962 "parent_fn",
7963 param_types);
7964 allocate_struct_function (parent_fndecl, true);
7965
7966 tree child_fndecl = make_fndecl (integer_type_node,
7967 "child_fn",
7968 param_types);
7969 allocate_struct_function (child_fndecl, true);
7970
7971 /* "a" and "b" in the parent frame. */
7972 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7973 get_identifier ("a"),
7974 integer_type_node);
7975 DECL_CONTEXT (a) = parent_fndecl;
7976 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7977 get_identifier ("b"),
7978 integer_type_node);
7979 DECL_CONTEXT (b) = parent_fndecl;
7980 /* "x" and "y" in a child frame. */
7981 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7982 get_identifier ("x"),
7983 integer_type_node);
7984 DECL_CONTEXT (x) = child_fndecl;
7985 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
7986 get_identifier ("y"),
7987 integer_type_node);
7988 DECL_CONTEXT (y) = child_fndecl;
7989
7990 /* "p" global. */
7991 tree p = build_global_decl ("p", ptr_type_node);
7992
7993 /* "q" global. */
7994 tree q = build_global_decl ("q", ptr_type_node);
7995
7996 region_model_manager mgr;
7997 test_region_model_context ctxt;
7998 region_model model (&mgr);
7999
8000 /* Push stack frame for "parent_fn". */
8001 const region *parent_frame_reg
8002 = model.push_frame (*DECL_STRUCT_FUNCTION (parent_fndecl),
8003 NULL, &ctxt);
8004 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
8005 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
8006 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
8007 model.set_value (a_in_parent_reg,
8008 model.get_rvalue (int_42, &ctxt),
8009 &ctxt);
8010 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
8011
8012 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
8013 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
8014 tristate (tristate::TS_TRUE));
8015
8016 /* Push stack frame for "child_fn". */
8017 const region *child_frame_reg
8018 = model.push_frame (*DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
8019 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
8020 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
8021 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
8022 model.set_value (x_in_child_reg,
8023 model.get_rvalue (int_0, &ctxt),
8024 &ctxt);
8025 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
8026
8027 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
8028 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
8029 tristate (tristate::TS_TRUE));
8030
8031 /* Point a global pointer at a local in the child frame: p = &x. */
8032 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
8033 model.set_value (p_in_globals_reg,
8034 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
8035 &ctxt);
8036 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
8037
8038 /* Point another global pointer at p: q = &p. */
8039 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
8040 model.set_value (q_in_globals_reg,
8041 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
8042 &ctxt);
8043
8044 /* Test region::descendent_of_p. */
8045 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
8046 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
8047 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
8048
8049 /* Pop the "child_fn" frame from the stack. */
8050 model.pop_frame (NULL, NULL, &ctxt);
8051 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
8052 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
8053
8054 /* Verify that p (which was pointing at the local "x" in the popped
8055 frame) has been poisoned. */
8056 const svalue *new_p_sval = model.get_rvalue (p, NULL);
8057 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
8058 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
8059 POISON_KIND_POPPED_STACK);
8060
8061 /* Verify that q still points to p, in spite of the region
8062 renumbering. */
8063 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
8064 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
8065 ASSERT_EQ (new_q_sval->maybe_get_region (),
8066 model.get_lvalue (p, &ctxt));
8067
8068 /* Verify that top of stack has been updated. */
8069 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
8070
8071 /* Verify locals in parent frame. */
8072 /* Verify "a" still has its value. */
8073 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
8074 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
8075 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
8076 int_42);
8077 /* Verify "b" still has its constraint. */
8078 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
8079 tristate (tristate::TS_TRUE));
8080}
8081
8082/* Verify that get_representative_path_var works as expected, that
8083 we can map from regions to parms and back within a recursive call
8084 stack. */
8085
8086static void
8087test_get_representative_path_var ()
8088{
8089 auto_vec <tree> param_types;
8090 tree fndecl = make_fndecl (integer_type_node,
8091 "factorial",
8092 param_types);
8093 allocate_struct_function (fndecl, true);
8094
8095 /* Parm "n". */
8096 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
8097 get_identifier ("n"),
8098 integer_type_node);
8099 DECL_CONTEXT (n) = fndecl;
8100
8101 region_model_manager mgr;
8102 test_region_model_context ctxt;
8103 region_model model (&mgr);
8104
8105 /* Push 5 stack frames for "factorial", each with a param */
8106 auto_vec<const region *> parm_regs;
8107 auto_vec<const svalue *> parm_svals;
8108 for (int depth = 0; depth < 5; depth++)
8109 {
8110 const region *frame_n_reg
8111 = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
8112 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
8113 parm_regs.safe_push (parm_n_reg);
8114
8115 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
8116 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
8117 parm_svals.safe_push (sval_n);
8118 }
8119
8120 /* Verify that we can recognize that the regions are the parms,
8121 at every depth. */
8122 for (int depth = 0; depth < 5; depth++)
8123 {
8124 {
8125 svalue_set visited;
8126 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
8127 &visited),
8128 path_var (n, depth + 1));
8129 }
8130 /* ...and that we can lookup lvalues for locals for all frames,
8131 not just the top. */
8132 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
8133 parm_regs[depth]);
8134 /* ...and that we can locate the svalues. */
8135 {
8136 svalue_set visited;
8137 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
8138 &visited),
8139 path_var (n, depth + 1));
8140 }
8141 }
8142}
8143
8144/* Ensure that region_model::operator== works as expected. */
8145
8146static void
8147 test_equality_1 ()
8148 {
8149 tree int_42 = build_int_cst (integer_type_node, 42);
8150 tree int_17 = build_int_cst (integer_type_node, 17);
8151
8152/* Verify that "empty" region_model instances are equal to each other. */
8153 region_model_manager mgr;
8154 region_model model0 (&mgr);
8155 region_model model1 (&mgr);
8156 ASSERT_EQ (model0, model1);
8157
8158 /* Verify that setting state in model1 makes the models non-equal. */
8159 tree x = build_global_decl ("x", integer_type_node);
8160 model0.set_value (x, int_42, NULL);
8161 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
8162 ASSERT_NE (model0, model1);
8163
8164 /* Verify the copy-ctor. */
8165 region_model model2 (model0);
8166 ASSERT_EQ (model0, model2);
8167 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
8168 ASSERT_NE (model1, model2);
8169
8170 /* Verify that models obtained from copy-ctor are independently editable
8171 w/o affecting the original model. */
8172 model2.set_value (x, int_17, NULL);
8173 ASSERT_NE (model0, model2);
8174 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
8175 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
8176}
8177
8178/* Verify that region models for
8179 x = 42; y = 113;
8180 and
8181 y = 113; x = 42;
8182 are equal. */
8183
8184static void
8185test_canonicalization_2 ()
8186{
8187 tree int_42 = build_int_cst (integer_type_node, 42);
8188 tree int_113 = build_int_cst (integer_type_node, 113);
8189 tree x = build_global_decl ("x", integer_type_node);
8190 tree y = build_global_decl ("y", integer_type_node);
8191
8192 region_model_manager mgr;
8193 region_model model0 (&mgr);
757bf1df
DM
8194 model0.set_value (model0.get_lvalue (x, NULL),
8195 model0.get_rvalue (int_42, NULL),
8196 NULL);
8197 model0.set_value (model0.get_lvalue (y, NULL),
8198 model0.get_rvalue (int_113, NULL),
8199 NULL);
8200
8201 region_model model1 (&mgr);
8202 model1.set_value (model1.get_lvalue (y, NULL),
8203 model1.get_rvalue (int_113, NULL),
8204 NULL);
8205 model1.set_value (model1.get_lvalue (x, NULL),
8206 model1.get_rvalue (int_42, NULL),
8207 NULL);
8208
8209 ASSERT_EQ (model0, model1);
8210}
8211
8212/* Verify that constraints for
8213 x > 3 && y > 42
8214 and
8215 y > 42 && x > 3
8216 are equal after canonicalization. */
8217
8218static void
8219test_canonicalization_3 ()
8220{
8221 tree int_3 = build_int_cst (integer_type_node, 3);
8222 tree int_42 = build_int_cst (integer_type_node, 42);
8223 tree x = build_global_decl ("x", integer_type_node);
8224 tree y = build_global_decl ("y", integer_type_node);
8225
8226 region_model_manager mgr;
8227 region_model model0 (&mgr);
8228 model0.add_constraint (x, GT_EXPR, int_3, NULL);
8229 model0.add_constraint (y, GT_EXPR, int_42, NULL);
8230
8231 region_model model1 (&mgr);
8232 model1.add_constraint (y, GT_EXPR, int_42, NULL);
8233 model1.add_constraint (x, GT_EXPR, int_3, NULL);
8234
8235 model0.canonicalize ();
8236 model1.canonicalize ();
8237 ASSERT_EQ (model0, model1);
8238}
8239
8240/* Verify that we can canonicalize a model containing NaN and other real
8241 constants. */
8242
8243static void
8244test_canonicalization_4 ()
8245{
8246 auto_vec<tree> csts;
8247 append_interesting_constants (&csts);
8248
8249 region_model_manager mgr;
8250 region_model model (&mgr);
8251
8252 for (tree cst : csts)
8253 model.get_rvalue (cst, NULL);
8254
8255 model.canonicalize ();
8256}
8257
8258/* Assert that if we have two region_model instances
8259 with values VAL_A and VAL_B for EXPR that they are
8260 mergeable. Write the merged model to *OUT_MERGED_MODEL,
8261 and the merged svalue ptr to *OUT_MERGED_SVALUE.
8262 If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
8263 for that region_model. */
8264
8265static void
8266assert_region_models_merge (tree expr, tree val_a, tree val_b,
8267 region_model *out_merged_model,
8268 const svalue **out_merged_svalue)
757bf1df 8269{
8270 region_model_manager *mgr = out_merged_model->get_manager ();
8271 program_point point (program_point::origin (*mgr));
8272 test_region_model_context ctxt;
8273 region_model model0 (mgr);
8274 region_model model1 (mgr);
8275 if (val_a)
8276 model0.set_value (model0.get_lvalue (expr, &ctxt),
8277 model0.get_rvalue (val_a, &ctxt),
8278 &ctxt);
8279 if (val_b)
8280 model1.set_value (model1.get_lvalue (expr, &ctxt),
8281 model1.get_rvalue (val_b, &ctxt),
8282 &ctxt);
8283
8284 /* They should be mergeable. */
8285 ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
8286 *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
8287}

/* Verify that we can merge region_model instances.  */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (mgr.alloc_symbol_id (),
				  integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (mgr.alloc_symbol_id (),
					 integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

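  /* Verify that models with contradictory constraints on "x" can
     still be merged.  */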
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
         - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (*DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances.  */

static void
test_constraint_merging ()
{
  tree int_0 = integer_zero_node;
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

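  /* Constraints that held in only one of the input models ("z != 5",
     and the "x == y" equivalence) should not survive into the merged
     model.  */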
  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  function_point point (program_point::origin (mgr).get_function_point ());
  tree int_0 = integer_zero_node;
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = integer_one_node;
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);
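  /* The value was 0 on the first iteration and 1 on the next, so the
     widened value ascends from 0: it's known to be >= 0, but has no
     known upper bound.  Hence the comparisons against -1 below can be
     answered definitively, whereas most comparisons against 0, 1 and
     256 are unknown.  */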
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
      i_15 = 0;
      goto <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
        [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
       T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
       F: {i_11: 256}
 */
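
/* ("WIDENED" above denotes a widening_svalue: roughly, the analyzer's
   over-approximation of the set of values "i" takes on across the
   iterations of the loop.)  */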

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = integer_zero_node;
  tree int_1 = integer_one_node;
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256.  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

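/* Roughly, the user-level code being modeled (an illustrative sketch,
   not part of the test itself):

     void *p = malloc (sz);   // size unknown here
     char *q = (char *)p;
     if (p != NULL)
       ...then q is known to be non-NULL here too...  */
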
static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable.  */

static void
test_var ()
{
  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value".  */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value".  */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;".  */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;".  */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i".  */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

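/* Test of reading and writing array elements, covering both concrete
   and symbolic indices, and verifying how concrete and symbolic
   bindings clobber each other.  */
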
static void
test_array_2 ()
{
  /* "int arr[10];"  */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;"  */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = integer_zero_node;
  tree int_1 = integer_one_node;

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* Verify get_offset for "arr[i]".  */
  {
    const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
    region_offset offset = arr_i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    const svalue *offset_sval = offset.get_symbolic_byte_offset ();
    if (const svalue *cast = offset_sval->maybe_undo_cast ())
      offset_sval = cast;
    ASSERT_EQ (offset_sval->get_kind (), SK_BINOP);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}

/* Smoketest of dereferencing a pointer via MEM_REF.  */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
    *p;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
  tree offset_0 = build_int_cst (ptype, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;".  */
  model.set_value (x, int_17, NULL);

  /* "p = &x;".  */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9);  */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree ptype = build_pointer_type_for_mode (char_type_node, ptr_mode, true);
  tree offset_0 = build_int_cst (ptype, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works.  */

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg
    = model.get_or_create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works.  */

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame.  */
  const region *frame_reg
    = model.push_frame (*DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);".  */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works.  */

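/* involves_p reports whether one symbolic value was used in building
   another; e.g. below, the initial value of "*p" involves the initial
   value of "p", but not vice versa.  */
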
static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file.  */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_bits_within_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */