/* Classes for modeling the state of memory.
   Copyright (C) 2019-2022 Free Software Foundation, Inc.
   Contributed by David Malcolm <dmalcolm@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "function.h"
#include "basic-block.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "diagnostic-core.h"
#include "graphviz.h"
#include "options.h"
#include "cgraph.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "convert.h"
#include "target.h"
#include "fold-const.h"
#include "tree-pretty-print.h"
#include "diagnostic-color.h"
#include "diagnostic-metadata.h"
#include "tristate.h"
#include "bitmap.h"
#include "selftest.h"
#include "json.h"
#include "analyzer/analyzer.h"
#include "analyzer/analyzer-logging.h"
#include "ordered-hash-map.h"
#include "cfg.h"
#include "digraph.h"
#include "analyzer/supergraph.h"
#include "sbitmap.h"
#include "analyzer/call-string.h"
#include "analyzer/program-point.h"
#include "analyzer/store.h"
#include "analyzer/region-model.h"
#include "analyzer/constraint-manager.h"
#include "diagnostic-event-id.h"
#include "analyzer/sm.h"
#include "analyzer/pending-diagnostic.h"
#include "analyzer/region-model-reachability.h"
#include "analyzer/analyzer-selftests.h"
#include "analyzer/program-state.h"
#include "stor-layout.h"
#include "attribs.h"
#include "tree-object-size.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "tree-ssa-operands.h"
#include "ssa-iterators.h"
#include "calls.h"
#include "is-a.h"

#if ENABLE_ANALYZER

namespace ana {

/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}

/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}

/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
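
/* For example (an illustrative sketch; "type" is a hypothetical tree),
   instead of writing:
     pp_printf (pp, "invalid cast to %qT", type);
   which would nest a pp_printf call inside the outer one, callers do:
     pp_string (pp, "invalid cast to ");
     print_quoted_type (pp, type);  */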

/* class region_to_value_map.  */

/* Assignment operator for region_to_value_map.  */

region_to_value_map &
region_to_value_map::operator= (const region_to_value_map &other)
{
  m_hash_map.empty ();
  for (auto iter : other.m_hash_map)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      m_hash_map.put (reg, sval);
    }
  return *this;
}

/* Equality operator for region_to_value_map.  */

bool
region_to_value_map::operator== (const region_to_value_map &other) const
{
  if (m_hash_map.elements () != other.m_hash_map.elements ())
    return false;

  for (auto iter : *this)
    {
      const region *reg = iter.first;
      const svalue *sval = iter.second;
      const svalue * const *other_slot = other.get (reg);
      if (other_slot == NULL)
        return false;
      if (sval != *other_slot)
        return false;
    }

  return true;
}

/* Dump this object to PP.  */

void
region_to_value_map::dump_to_pp (pretty_printer *pp, bool simple,
                                 bool multiline) const
{
  auto_vec<const region *> regs;
  for (iterator iter = begin (); iter != end (); ++iter)
    regs.safe_push ((*iter).first);
  regs.qsort (region::cmp_ptr_ptr);
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  unsigned i;
  const region *reg;
  FOR_EACH_VEC_ELT (regs, i, reg)
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (i > 0)
        pp_string (pp, ", ");
      reg->dump_to_pp (pp, simple);
      pp_string (pp, ": ");
      const svalue *sval = *get (reg);
      sval->dump_to_pp (pp, true);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");
}

/* Dump this object to stderr.  */

DEBUG_FUNCTION void
region_to_value_map::dump (bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = stderr;
  dump_to_pp (&pp, simple, true);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Attempt to merge THIS with OTHER, writing the result
   to OUT.

   For now, write (region, value) mappings that are in common between THIS
   and OTHER to OUT, effectively taking the intersection, rather than
   rejecting differences.  */

bool
region_to_value_map::can_merge_with_p (const region_to_value_map &other,
                                       region_to_value_map *out) const
{
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      const svalue * const *other_slot = other.get (iter_reg);
      if (other_slot)
        if (iter_sval == *other_slot)
          out->put (iter_reg, iter_sval);
    }
  return true;
}
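
/* For example (a sketch of the intersection semantics above, with
   hypothetical regions r0/r1 and svalues A/B/C), merging
     {r0: A, r1: B}
   with
     {r0: A, r1: C}
   succeeds, writing {r0: A} to OUT: the conflicting mapping for r1 is
   dropped rather than causing the merge to fail.  */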

/* Purge any state involving SVAL.  */

void
region_to_value_map::purge_state_involving (const svalue *sval)
{
  auto_vec<const region *> to_purge;
  for (auto iter : *this)
    {
      const region *iter_reg = iter.first;
      const svalue *iter_sval = iter.second;
      if (iter_reg->involves_p (sval) || iter_sval->involves_p (sval))
        to_purge.safe_push (iter_reg);
    }
  for (auto iter : to_purge)
    m_hash_map.remove (iter);
}

/* class region_model.  */

/* Ctor for region_model: construct an "empty" model.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL),
  m_dynamic_extents ()
{
  m_constraints = new constraint_manager (mgr);
}

/* region_model's copy ctor.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame),
  m_dynamic_extents (other.m_dynamic_extents)
{
}

/* region_model's dtor.  */

region_model::~region_model ()
{
  delete m_constraints;
}

/* region_model's assignment operator.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  m_dynamic_extents = other.m_dynamic_extents;

  return *this;
}

/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  if (m_dynamic_extents != other.m_dynamic_extents)
    return false;

  gcc_checking_assert (hash () == other.hash ());

  return true;
}

/* Generate a hash value for this region_model.  */

hashval_t
region_model::hash () const
{
  hashval_t result = m_store.hash ();
  result ^= m_constraints->hash ();
  return result;
}

/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
                          bool multiline) const
{
  /* Dump stack.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
        pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
        pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
        pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
                      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");

  /* Dump sizes of dynamic regions, if any are known.  */
  if (!m_dynamic_extents.is_empty ())
    {
      pp_string (pp, "dynamic_extents:");
      m_dynamic_extents.dump_to_pp (pp, simple, multiline);
    }
}

/* Dump a representation of this model to FILE.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}

/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}

/* Assert that this object is valid.  */

void
region_model::validate () const
{
  m_store.validate ();
}

/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}

/* Return true if this region_model is in canonical form.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}

/* See the comment for store::loop_replay_fixup.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}

/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values.  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind,
                             const region *src_region)
  : m_expr (expr), m_pkind (pkind),
    m_src_region (src_region)
  {}

  const char *get_kind () const final override
  {
    return "poisoned_value_diagnostic";
  }

  bool use_of_uninit_p () const final override
  {
    return m_pkind == POISON_KIND_UNINIT;
  }

  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return (m_expr == other.m_expr
            && m_pkind == other.m_pkind
            && m_src_region == other.m_src_region);
  }

  int get_controlling_option () const final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return OPT_Wanalyzer_use_of_uninitialized_value;
      case POISON_KIND_FREED:
        return OPT_Wanalyzer_use_after_free;
      case POISON_KIND_POPPED_STACK:
        return OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame;
      }
  }

  bool emit (rich_location *rich_loc) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        {
          diagnostic_metadata m;
          m.add_cwe (457); /* "CWE-457: Use of Uninitialized Variable".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use of uninitialized value %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_FREED:
        {
          diagnostic_metadata m;
          m.add_cwe (416); /* "CWE-416: Use After Free".  */
          return warning_meta (rich_loc, m, get_controlling_option (),
                               "use after %<free%> of %qE",
                               m_expr);
        }
        break;
      case POISON_KIND_POPPED_STACK:
        {
          /* TODO: which CWE?  */
          return warning_at
            (rich_loc, get_controlling_option (),
             "dereferencing pointer %qE to within stale stack frame",
             m_expr);
        }
        break;
      }
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    switch (m_pkind)
      {
      default:
        gcc_unreachable ();
      case POISON_KIND_UNINIT:
        return ev.formatted_print ("use of uninitialized value %qE here",
                                   m_expr);
      case POISON_KIND_FREED:
        return ev.formatted_print ("use after %<free%> of %qE here",
                                   m_expr);
      case POISON_KIND_POPPED_STACK:
        return ev.formatted_print
          ("dereferencing pointer %qE to within stale stack frame",
           m_expr);
      }
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    if (m_src_region)
      interest->add_region_creation (m_src_region);
  }

private:
  tree m_expr;
  enum poison_kind m_pkind;
  const region *m_src_region;
};
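
/* For instance (an illustrative sketch, not code from this file), the
   analyzer would queue poisoned_value_diagnostics for:

     int *p = (int *)malloc (sizeof (int));
     free (p);
     return *p;   // "use after 'free' of 'p'" (POISON_KIND_FREED)

   and for:

     int i;
     return i;    // "use of uninitialized value 'i'" (POISON_KIND_UNINIT)
*/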

/* A subclass of pending_diagnostic for complaining about shifts
   by negative counts.  */

class shift_count_negative_diagnostic
: public pending_diagnostic_subclass<shift_count_negative_diagnostic>
{
public:
  shift_count_negative_diagnostic (const gassign *assign, tree count_cst)
  : m_assign (assign), m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_negative_diagnostic";
  }

  bool operator== (const shift_count_negative_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_negative;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by negative count (%qE)", m_count_cst);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by negative amount here (%qE)",
                               m_count_cst);
  }

private:
  const gassign *m_assign;
  tree m_count_cst;
};

/* A subclass of pending_diagnostic for complaining about shifts
   by counts >= the width of the operand type.  */

class shift_count_overflow_diagnostic
: public pending_diagnostic_subclass<shift_count_overflow_diagnostic>
{
public:
  shift_count_overflow_diagnostic (const gassign *assign,
                                   int operand_precision,
                                   tree count_cst)
  : m_assign (assign), m_operand_precision (operand_precision),
    m_count_cst (count_cst)
  {}

  const char *get_kind () const final override
  {
    return "shift_count_overflow_diagnostic";
  }

  bool operator== (const shift_count_overflow_diagnostic &other) const
  {
    return (m_assign == other.m_assign
            && m_operand_precision == other.m_operand_precision
            && same_tree_p (m_count_cst, other.m_count_cst));
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_shift_count_overflow;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
                       "shift by count (%qE) >= precision of type (%qi)",
                       m_count_cst, m_operand_precision);
  }

  label_text describe_final_event (const evdesc::final_event &ev) final override
  {
    return ev.formatted_print ("shift by count %qE here", m_count_cst);
  }

private:
  const gassign *m_assign;
  int m_operand_precision;
  tree m_count_cst;
};
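
/* Sketch of code that would trigger the above diagnostics (assuming a
   32-bit "int"):

     int f (int x) { return x << -1; }   // shift_count_negative
     int g (int x) { return x << 32; }   // shift_count_overflow
*/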

/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
                                  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
        /* e.g. "_1 = a_10(D) + 12;" */
        tree ptr = rhs1;
        tree offset = gimple_assign_rhs2 (assign);

        const svalue *ptr_sval = get_rvalue (ptr, ctxt);
        const svalue *offset_sval = get_rvalue (offset, ctxt);
        /* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
           is an integer of type sizetype".  */
        offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        ptr_sval, offset_sval);
        return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
        /* e.g. "_1 = p_2(D) - q_3(D);".  */
        tree rhs2 = gimple_assign_rhs2 (assign);
        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        // TODO: perhaps fold to zero if they're known to be equal?

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    /* Assignments of the form
         set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR;  */
    case VAR_DECL: /* LHS = VAR;  */
    case PARM_DECL: /* LHS = VAR;  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        /* Unary ops.  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        const svalue *sval_unaryop
          = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
        return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (TREE_TYPE (lhs) == boolean_type_node)
          {
            /* Consider constraints between svalues.  */
            tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
            if (t.is_known ())
              return m_mgr->get_or_create_constant_svalue
                (t.is_true () ? boolean_true_node : boolean_false_node);
          }

        /* Otherwise, generate a symbolic binary op.  */
        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
        /* Binary ops.  */
        tree rhs2 = gimple_assign_rhs2 (assign);

        const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
        const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

        if (ctxt && (op == LSHIFT_EXPR || op == RSHIFT_EXPR))
          {
            /* "INT34-C. Do not shift an expression by a negative number of
               bits or by greater than or equal to the number of bits that
               exist in the operand."  */
            if (const tree rhs2_cst = rhs2_sval->maybe_get_constant ())
              if (TREE_CODE (rhs2_cst) == INTEGER_CST)
                {
                  if (tree_int_cst_sgn (rhs2_cst) < 0)
                    ctxt->warn (new shift_count_negative_diagnostic
                                  (assign, rhs2_cst));
                  else if (compare_tree_int (rhs2_cst,
                                             TYPE_PRECISION (TREE_TYPE (rhs1)))
                           >= 0)
                    ctxt->warn (new shift_count_overflow_diagnostic
                                  (assign,
                                   TYPE_PRECISION (TREE_TYPE (rhs1)),
                                   rhs2_cst));
                }
          }

        const svalue *sval_binop
          = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
                                        rhs1_sval, rhs2_sval);
        return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   of the form:
     ((A OR-IF B) OR-IF C)
   and:
     ((A AND-IF B) AND-IF C)
   where evaluating B is redundant, but could involve simple accesses of
   uninitialized locals.

   When optimization is turned on the FE can immediately fold compound
   conditionals.  Specifically, c_parser_condition parses this condition:
     ((A OR-IF B) OR-IF C)
   and calls c_fully_fold on the condition.
   Within c_fully_fold, fold_truth_andor is called, which bails when
   optimization is off, but if any optimization is turned on can convert the
     ((A OR-IF B) OR-IF C)
   into:
     ((A OR B) OR-IF C)
   for sufficiently simple B,
   i.e. the inner OR-IF becomes an OR.
   At gimplification time the inner OR becomes BIT_IOR_EXPR (in gimplify_expr),
   giving this for the inner condition:
     tmp = A | B;
     if (tmp)
   thus effectively synthesizing a redundant access of B when optimization
   is turned on, when compared to:
     if (A) goto L1; else goto L4;
     L1: if (B) goto L2; else goto L4;
     L2: if (C) goto L3; else goto L4;
   for the unoptimized case.

   Return true if CTXT appears to be handling such a short-circuitable stmt,
   such as the def-stmt for B for the:
     tmp = A | B;
   case above, for the case where A is true and thus B would have been
   short-circuited without optimization, using MODEL for the value of A.  */

static bool
within_short_circuited_stmt_p (const region_model *model,
                               const gassign *assign_stmt)
{
  /* We must have an assignment to a temporary of _Bool type.  */
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_TYPE (lhs) != boolean_type_node)
    return false;
  if (TREE_CODE (lhs) != SSA_NAME)
    return false;
  if (SSA_NAME_VAR (lhs) != NULL_TREE)
    return false;

  /* The temporary bool must be used exactly once: as the second arg of
     a BIT_IOR_EXPR or BIT_AND_EXPR.  */
  use_operand_p use_op;
  gimple *use_stmt;
  if (!single_imm_use (lhs, &use_op, &use_stmt))
    return false;
  const gassign *use_assign = dyn_cast <const gassign *> (use_stmt);
  if (!use_assign)
    return false;
  enum tree_code op = gimple_assign_rhs_code (use_assign);
  if (!(op == BIT_IOR_EXPR || op == BIT_AND_EXPR))
    return false;
  if (!(gimple_assign_rhs1 (use_assign) != lhs
        && gimple_assign_rhs2 (use_assign) == lhs))
    return false;

  /* The first arg of the bitwise stmt must have a known value in MODEL
     that implies that the value of the second arg doesn't matter, i.e.
     1 for bitwise or, 0 for bitwise and.  */
  tree other_arg = gimple_assign_rhs1 (use_assign);
  /* Use a NULL ctxt here to avoid generating warnings.  */
  const svalue *other_arg_sval = model->get_rvalue (other_arg, NULL);
  tree other_arg_cst = other_arg_sval->maybe_get_constant ();
  if (!other_arg_cst)
    return false;
  switch (op)
    {
    default:
      gcc_unreachable ();
    case BIT_IOR_EXPR:
      if (zerop (other_arg_cst))
        return false;
      break;
    case BIT_AND_EXPR:
      if (!zerop (other_arg_cst))
        return false;
      break;
    }

  /* All tests passed.  We appear to be in a stmt that generates a boolean
     temporary with a value that won't matter.  */
  return true;
}
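
/* A sketch at the C level of the situation guarded against (assuming
   optimization is enabled and "b" is a simple uninitialized local):

     int a = 1;
     int b;
     if (a || b)   // can be folded to "tmp = a | b; if (tmp)"
       ...

   Reading "b" here is redundant, since "a" is known to be true, so no
   uninit warning should be issued for it.  */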

/* Workaround for discarding certain false positives from
   -Wanalyzer-use-of-uninitialized-value
   seen with -ftrivial-auto-var-init=.

   -ftrivial-auto-var-init= will generate calls to IFN_DEFERRED_INIT.

   If the address of the var is taken, gimplification will give us
   something like:

     _1 = .DEFERRED_INIT (4, 2, &"len"[0]);
     len = _1;

   The result of DEFERRED_INIT will be an uninit value; we don't
   want to emit a false positive for "len = _1;".

   Return true if ASSIGN_STMT is such a stmt.  */

static bool
due_to_ifn_deferred_init_p (const gassign *assign_stmt)
{
  /* We must have an assignment to a decl from an SSA name that's the
     result of an IFN_DEFERRED_INIT call.  */
  if (gimple_assign_rhs_code (assign_stmt) != SSA_NAME)
    return false;
  tree lhs = gimple_assign_lhs (assign_stmt);
  if (TREE_CODE (lhs) != VAR_DECL)
    return false;
  tree rhs = gimple_assign_rhs1 (assign_stmt);
  if (TREE_CODE (rhs) != SSA_NAME)
    return false;
  const gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
  const gcall *call = dyn_cast <const gcall *> (def_stmt);
  if (!call)
    return false;
  if (gimple_call_internal_p (call)
      && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
    return true;
  return false;
}

/* Check for SVAL being poisoned, adding a warning to CTXT.
   Return SVAL, or, if a warning is added, another value, to avoid
   repeatedly complaining about the same poisoned value in followup code.  */

const svalue *
region_model::check_for_poison (const svalue *sval,
                                tree expr,
                                region_model_context *ctxt) const
{
  if (!ctxt)
    return sval;

  if (const poisoned_svalue *poisoned_sval = sval->dyn_cast_poisoned_svalue ())
    {
      enum poison_kind pkind = poisoned_sval->get_poison_kind ();

      /* Ignore uninitialized uses of empty types; there's nothing
         to initialize.  */
      if (pkind == POISON_KIND_UNINIT
          && sval->get_type ()
          && is_empty_type (sval->get_type ()))
        return sval;

      if (pkind == POISON_KIND_UNINIT)
        if (const gimple *curr_stmt = ctxt->get_stmt ())
          if (const gassign *assign_stmt
                = dyn_cast <const gassign *> (curr_stmt))
            {
              /* Special case to avoid certain false positives.  */
              if (within_short_circuited_stmt_p (this, assign_stmt))
                return sval;

              /* Special case to avoid false positive on
                 -ftrivial-auto-var-init=.  */
              if (due_to_ifn_deferred_init_p (assign_stmt))
                return sval;
            }

      /* If we have an SSA name for a temporary, we don't want to print
         '<unknown>'.
         Poisoned values are shared by type, and so we can't reconstruct
         the tree other than via the def stmts, using
         fixup_tree_for_diagnostic.  */
      tree diag_arg = fixup_tree_for_diagnostic (expr);
      const region *src_region = NULL;
      if (pkind == POISON_KIND_UNINIT)
        src_region = get_region_for_poisoned_expr (expr);
      if (ctxt->warn (new poisoned_value_diagnostic (diag_arg, pkind,
                                                     src_region)))
        {
          /* We only want to report use of a poisoned value at the first
             place it gets used; return an unknown value to avoid generating
             a chain of followup warnings.  */
          sval = m_mgr->get_or_create_unknown_svalue (sval->get_type ());
        }

      return sval;
    }

  return sval;
}

/* Attempt to get a region for describing EXPR, the source region of
   a poisoned_svalue, for use in a poisoned_value_diagnostic.
   Return NULL if there is no good region to use.  */

const region *
region_model::get_region_for_poisoned_expr (tree expr) const
{
  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree decl = SSA_NAME_VAR (expr);
      if (decl && DECL_P (decl))
        expr = decl;
      else
        return NULL;
    }
  return get_lvalue (expr, NULL);
}

/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      tree expr = get_diagnostic_tree_for_gassign (assign);
      check_for_poison (sval, expr, ctxt);
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
        if (0)
          sorry_at (assign->location, "unhandled assignment op: %qs",
                    get_tree_code_name (op));
        const svalue *unknown_sval
          = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
        set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
        if (TREE_CLOBBER_P (rhs1))
          {
            /* e.g. "x ={v} {CLOBBER};"  */
            clobber_region (lhs_reg);
          }
        else
          {
            /* Any CONSTRUCTOR that survives to this point is either
               just a zero-init of everything, or a vector.  */
            if (!CONSTRUCTOR_NO_CLEARING (rhs1))
              zero_fill_region (lhs_reg);
            unsigned ix;
            tree index;
            tree val;
            FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
              {
                gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
                if (!index)
                  index = build_int_cst (integer_type_node, ix);
                gcc_assert (TREE_CODE (index) == INTEGER_CST);
                const svalue *index_sval
                  = m_mgr->get_or_create_constant_svalue (index);
                gcc_assert (index_sval);
                const region *sub_reg
                  = m_mgr->get_element_region (lhs_reg,
                                               TREE_TYPE (val),
                                               index_sval);
                const svalue *val_sval = get_rvalue (val, ctxt);
                set_value (sub_reg, val_sval, ctxt);
              }
          }
      }
      break;

    case STRING_CST:
      {
        /* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
        const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
        m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
                           ctxt ? ctxt->get_uncertainty () : NULL);
      }
      break;
    }
}

/* A pending_diagnostic subclass for implementing "__analyzer_dump_path".  */

class dump_path_diagnostic
  : public pending_diagnostic_subclass<dump_path_diagnostic>
{
public:
  int get_controlling_option () const final override
  {
    return 0;
  }

  bool emit (rich_location *richloc) final override
  {
    inform (richloc, "path");
    return true;
  }

  const char *get_kind () const final override
  {
    return "dump_path_diagnostic";
  }

  bool operator== (const dump_path_diagnostic &) const
  {
    return true;
  }
};

/* Handle the pre-sm-state part of STMT, modifying this object in-place.
   Write true to *OUT_TERMINATE_PATH if the path should be terminated.
   Write true to *OUT_UNKNOWN_SIDE_EFFECTS if the stmt has unknown
   side effects.  */

void
region_model::on_stmt_pre (const gimple *stmt,
                           bool *out_terminate_path,
                           bool *out_unknown_side_effects,
                           region_model_context *ctxt)
{
  switch (gimple_code (stmt))
    {
    default:
      /* No-op for now.  */
      break;

    case GIMPLE_ASSIGN:
      {
        const gassign *assign = as_a <const gassign *> (stmt);
        on_assignment (assign, ctxt);
      }
      break;

    case GIMPLE_ASM:
      {
        const gasm *asm_stmt = as_a <const gasm *> (stmt);
        on_asm_stmt (asm_stmt, ctxt);
      }
      break;

    case GIMPLE_CALL:
      {
        /* Track whether we have a gcall to a function that's not recognized by
           anything, for which we don't have a function body, or for which we
           don't know the fndecl.  */
        const gcall *call = as_a <const gcall *> (stmt);

        /* Debugging/test support.  */
        if (is_special_named_call_p (call, "__analyzer_describe", 2))
          impl_call_analyzer_describe (call, ctxt);
        else if (is_special_named_call_p (call, "__analyzer_dump_capacity", 1))
          impl_call_analyzer_dump_capacity (call, ctxt);
        else if (is_special_named_call_p (call, "__analyzer_dump_escaped", 0))
          impl_call_analyzer_dump_escaped (call);
        else if (is_special_named_call_p (call, "__analyzer_dump_path", 0))
          {
            /* Handle the builtin "__analyzer_dump_path" by queuing a
               diagnostic at this exploded_node.  */
            ctxt->warn (new dump_path_diagnostic ());
          }
        else if (is_special_named_call_p (call, "__analyzer_dump_region_model",
                                          0))
          {
            /* Handle the builtin "__analyzer_dump_region_model" by dumping
               the region model's state to stderr.  */
            dump (false);
          }
        else if (is_special_named_call_p (call, "__analyzer_eval", 1))
          impl_call_analyzer_eval (call, ctxt);
        else if (is_special_named_call_p (call, "__analyzer_break", 0))
          {
            /* Handle the builtin "__analyzer_break" by triggering a
               breakpoint.  */
            /* TODO: is there a good cross-platform way to do this?  */
            raise (SIGINT);
          }
        else if (is_special_named_call_p (call,
                                          "__analyzer_dump_exploded_nodes",
                                          1))
          {
            /* This is handled elsewhere.  */
          }
        else
          *out_unknown_side_effects = on_call_pre (call, ctxt,
                                                   out_terminate_path);
      }
      break;

    case GIMPLE_RETURN:
      {
        const greturn *return_ = as_a <const greturn *> (stmt);
        on_return (return_, ctxt);
      }
      break;
    }
}

/* Abstract base class for all out-of-bounds warnings with concrete values.  */

class out_of_bounds : public pending_diagnostic_subclass<out_of_bounds>
{
public:
  out_of_bounds (const region *reg, tree diag_arg,
                 byte_range out_of_bounds_range)
  : m_reg (reg), m_diag_arg (diag_arg),
    m_out_of_bounds_range (out_of_bounds_range)
  {}

  const char *get_kind () const final override
  {
    return "out_of_bounds_diagnostic";
  }

  bool operator== (const out_of_bounds &other) const
  {
    return m_reg == other.m_reg
           && m_out_of_bounds_range == other.m_out_of_bounds_range
           && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  byte_range m_out_of_bounds_range;
};

/* Abstract subclass to complain about out-of-bounds accesses
   past the end of the buffer.  */

class past_the_end : public out_of_bounds
{
public:
  past_the_end (const region *reg, tree diag_arg, byte_range range,
                tree byte_bound)
  : out_of_bounds (reg, diag_arg, range), m_byte_bound (byte_bound)
  {}

  bool operator== (const past_the_end &other) const
  {
    return out_of_bounds::operator== (other)
           && pending_diagnostic::same_tree_p (m_byte_bound,
                                               other.m_byte_bound);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev)
    final override
  {
    if (m_byte_bound && TREE_CODE (m_byte_bound) == INTEGER_CST)
      return ev.formatted_print ("capacity is %E bytes", m_byte_bound);

    return label_text ();
  }

protected:
  tree m_byte_bound;
};

/* Concrete subclass to complain about buffer overflows.  */

class buffer_overflow : public past_the_end
{
public:
  buffer_overflow (const region *reg, tree diag_arg,
                   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned;
    switch (m_reg->get_memory_space ())
      {
      default:
        m.add_cwe (787);
        warned = warning_meta (rich_loc, m, get_controlling_option (),
                               "buffer overflow");
        break;
      case MEMSPACE_STACK:
        m.add_cwe (121);
        warned = warning_meta (rich_loc, m, get_controlling_option (),
                               "stack-based buffer overflow");
        break;
      case MEMSPACE_HEAP:
        m.add_cwe (122);
        warned = warning_meta (rich_loc, m, get_controlling_option (),
                               "heap-based buffer overflow");
        break;
      }

    if (warned)
      {
        char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
        print_dec (m_out_of_bounds_range.m_size_in_bytes,
                   num_bytes_past_buf, UNSIGNED);
        if (m_diag_arg)
          inform (rich_loc->get_loc (), "write is %s bytes past the end"
                  " of %qE", num_bytes_past_buf,
                  m_diag_arg);
        else
          inform (rich_loc->get_loc (), "write is %s bytes past the end"
                  " of the region",
                  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
                                     " ends at byte %E", start_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds write at byte %s but region"
                                   " ends at byte %E", start_buf,
                                   m_byte_bound);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write from byte %s till"
                                     " byte %s but %qE ends at byte %E",
                                     start_buf, end_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds write from byte %s till"
                                   " byte %s but region ends at byte %E",
                                   start_buf, end_buf, m_byte_bound);
      }
  }
};

/* Concrete subclass to complain about buffer overreads.  */

class buffer_overread : public past_the_end
{
public:
  buffer_overread (const region *reg, tree diag_arg,
                   byte_range range, tree byte_bound)
  : past_the_end (reg, diag_arg, range, byte_bound)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (126);
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
                                "buffer overread");

    if (warned)
      {
        char num_bytes_past_buf[WIDE_INT_PRINT_BUFFER_SIZE];
        print_dec (m_out_of_bounds_range.m_size_in_bytes,
                   num_bytes_past_buf, UNSIGNED);
        if (m_diag_arg)
          inform (rich_loc->get_loc (), "read is %s bytes past the end"
                  " of %qE", num_bytes_past_buf,
                  m_diag_arg);
        else
          inform (rich_loc->get_loc (), "read is %s bytes past the end"
                  " of the region",
                  num_bytes_past_buf);
      }

    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
                                     " ends at byte %E", start_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds read at byte %s but region"
                                   " ends at byte %E", start_buf,
                                   m_byte_bound);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read from byte %s till"
                                     " byte %s but %qE ends at byte %E",
                                     start_buf, end_buf, m_diag_arg,
                                     m_byte_bound);
        return ev.formatted_print ("out-of-bounds read from byte %s till"
                                   " byte %s but region ends at byte %E",
                                   start_buf, end_buf, m_byte_bound);
      }
  }
};

/* Concrete subclass to complain about buffer underflows.  */

class buffer_underflow : public out_of_bounds
{
public:
  buffer_underflow (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (124);
    return warning_meta (rich_loc, m, get_controlling_option (),
                         "buffer underflow");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write at byte %s but %qE"
                                     " starts at byte 0", start_buf,
                                     m_diag_arg);
        return ev.formatted_print ("out-of-bounds write at byte %s but region"
                                   " starts at byte 0", start_buf);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds write from byte %s till"
                                     " byte %s but %qE starts at byte 0",
                                     start_buf, end_buf, m_diag_arg);
        return ev.formatted_print ("out-of-bounds write from byte %s till"
                                   " byte %s but region starts at byte 0",
                                   start_buf, end_buf);
      }
  }
};

/* Concrete subclass to complain about buffer underreads.  */

class buffer_underread : public out_of_bounds
{
public:
  buffer_underread (const region *reg, tree diag_arg, byte_range range)
  : out_of_bounds (reg, diag_arg, range)
  {}

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (127);
    return warning_meta (rich_loc, m, get_controlling_option (),
                         "buffer underread");
  }

  label_text describe_final_event (const evdesc::final_event &ev)
    final override
  {
    byte_size_t start = m_out_of_bounds_range.get_start_byte_offset ();
    byte_size_t end = m_out_of_bounds_range.get_last_byte_offset ();
    char start_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (start, start_buf, SIGNED);
    char end_buf[WIDE_INT_PRINT_BUFFER_SIZE];
    print_dec (end, end_buf, SIGNED);

    if (start == end)
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read at byte %s but %qE"
                                     " starts at byte 0", start_buf,
                                     m_diag_arg);
        return ev.formatted_print ("out-of-bounds read at byte %s but region"
                                   " starts at byte 0", start_buf);
      }
    else
      {
        if (m_diag_arg)
          return ev.formatted_print ("out-of-bounds read from byte %s till"
                                     " byte %s but %qE starts at byte 0",
                                     start_buf, end_buf, m_diag_arg);
        return ev.formatted_print ("out-of-bounds read from byte %s till"
                                   " byte %s but region starts at byte 0",
                                   start_buf, end_buf);
      }
  }
};

/* Abstract class to complain about out-of-bounds read/writes where
   the values are symbolic.  */

class symbolic_past_the_end
  : public pending_diagnostic_subclass<symbolic_past_the_end>
{
public:
  symbolic_past_the_end (const region *reg, tree diag_arg, tree offset,
                         tree num_bytes, tree capacity)
  : m_reg (reg), m_diag_arg (diag_arg), m_offset (offset),
    m_num_bytes (num_bytes), m_capacity (capacity)
  {}

  const char *get_kind () const final override
  {
    return "symbolic_past_the_end";
  }

  bool operator== (const symbolic_past_the_end &other) const
  {
    return m_reg == other.m_reg
           && pending_diagnostic::same_tree_p (m_diag_arg, other.m_diag_arg)
           && pending_diagnostic::same_tree_p (m_offset, other.m_offset)
           && pending_diagnostic::same_tree_p (m_num_bytes, other.m_num_bytes)
           && pending_diagnostic::same_tree_p (m_capacity, other.m_capacity);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_out_of_bounds;
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_reg);
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev)
    final override
  {
    if (m_capacity)
      return ev.formatted_print ("capacity is %qE bytes", m_capacity);

    return label_text ();
  }

  label_text
  describe_final_event (const evdesc::final_event &ev) final override
  {
    const char *byte_str;
    if (pending_diagnostic::same_tree_p (m_num_bytes, integer_one_node))
      byte_str = "byte";
    else
      byte_str = "bytes";

    if (m_offset)
      {
        if (m_num_bytes && TREE_CODE (m_num_bytes) == INTEGER_CST)
          {
            if (m_diag_arg)
              return ev.formatted_print ("%s of %E %s at offset %qE"
                                         " exceeds %qE", m_dir_str,
                                         m_num_bytes, byte_str,
                                         m_offset, m_diag_arg);
            else
              return ev.formatted_print ("%s of %E %s at offset %qE"
                                         " exceeds the buffer", m_dir_str,
                                         m_num_bytes, byte_str, m_offset);
          }
        else if (m_num_bytes)
          {
            if (m_diag_arg)
              return ev.formatted_print ("%s of %qE %s at offset %qE"
                                         " exceeds %qE", m_dir_str,
                                         m_num_bytes, byte_str,
                                         m_offset, m_diag_arg);
            else
              return ev.formatted_print ("%s of %qE %s at offset %qE"
                                         " exceeds the buffer", m_dir_str,
                                         m_num_bytes, byte_str, m_offset);
          }
        else
          {
            if (m_diag_arg)
              return ev.formatted_print ("%s at offset %qE exceeds %qE",
                                         m_dir_str, m_offset, m_diag_arg);
            else
              return ev.formatted_print ("%s at offset %qE exceeds the"
                                         " buffer", m_dir_str, m_offset);
          }
      }
    if (m_diag_arg)
      return ev.formatted_print ("out-of-bounds %s on %qE",
                                 m_dir_str, m_diag_arg);
    return ev.formatted_print ("out-of-bounds %s", m_dir_str);
  }

protected:
  const region *m_reg;
  tree m_diag_arg;
  tree m_offset;
  tree m_num_bytes;
  tree m_capacity;
  const char *m_dir_str;
};

/* Concrete subclass to complain about overflows with symbolic values.  */

class symbolic_buffer_overflow : public symbolic_past_the_end
{
public:
  symbolic_buffer_overflow (const region *reg, tree diag_arg, tree offset,
                            tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "write";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    switch (m_reg->get_memory_space ())
      {
      default:
        m.add_cwe (787);
        return warning_meta (rich_loc, m, get_controlling_option (),
                             "buffer overflow");
      case MEMSPACE_STACK:
        m.add_cwe (121);
        return warning_meta (rich_loc, m, get_controlling_option (),
                             "stack-based buffer overflow");
      case MEMSPACE_HEAP:
        m.add_cwe (122);
        return warning_meta (rich_loc, m, get_controlling_option (),
                             "heap-based buffer overflow");
      }
  }
};

/* Concrete subclass to complain about overreads with symbolic values.  */

class symbolic_buffer_overread : public symbolic_past_the_end
{
public:
  symbolic_buffer_overread (const region *reg, tree diag_arg, tree offset,
                            tree num_bytes, tree capacity)
  : symbolic_past_the_end (reg, diag_arg, offset, num_bytes, capacity)
  {
    m_dir_str = "read";
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (126);
    return warning_meta (rich_loc, m, get_controlling_option (),
                         "buffer overread");
  }
};

/* Check whether an access is past the end of the BASE_REG.  */

void
region_model::check_symbolic_bounds (const region *base_reg,
                                     const svalue *sym_byte_offset,
                                     const svalue *num_bytes_sval,
                                     const svalue *capacity,
                                     enum access_direction dir,
                                     region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  const svalue *next_byte
    = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
                                  sym_byte_offset, num_bytes_sval);

  if (eval_condition_without_cm (next_byte, GT_EXPR, capacity).is_true ())
    {
      tree diag_arg = get_representative_tree (base_reg);
      tree offset_tree = get_representative_tree (sym_byte_offset);
      tree num_bytes_tree = get_representative_tree (num_bytes_sval);
      tree capacity_tree = get_representative_tree (capacity);
      switch (dir)
        {
        default:
          gcc_unreachable ();
          break;
        case DIR_READ:
          ctxt->warn (new symbolic_buffer_overread (base_reg, diag_arg,
                                                    offset_tree,
                                                    num_bytes_tree,
                                                    capacity_tree));
          break;
        case DIR_WRITE:
          ctxt->warn (new symbolic_buffer_overflow (base_reg, diag_arg,
                                                    offset_tree,
                                                    num_bytes_tree,
                                                    capacity_tree));
          break;
        }
    }
}
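
/* Sketch of a symbolic access the check above can catch (assuming the
   analyzer is tracking the allocation's dynamic extent):

     char *buf = (char *)malloc (n);
     buf[n] = '\0';   // next_byte == n + 1 > capacity n: overflow
*/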

/* If SVAL is a constant integer, return the corresponding INTEGER_CST
   tree; otherwise return NULL_TREE.  */

static tree
maybe_get_integer_cst_tree (const svalue *sval)
{
  tree cst_tree = sval->maybe_get_constant ();
  if (cst_tree && TREE_CODE (cst_tree) == INTEGER_CST)
    return cst_tree;

  return NULL_TREE;
}
7e3b45be
TL
1807/* May complain when the access on REG is out-of-bounds. */
1808
7a6564c9
TL
1809void
1810region_model::check_region_bounds (const region *reg,
1811 enum access_direction dir,
1812 region_model_context *ctxt) const
7e3b45be
TL
1813{
1814 gcc_assert (ctxt);
1815
7a6564c9
TL
1816 /* Get the offset. */
1817 region_offset reg_offset = reg->get_offset (m_mgr);
7e3b45be
TL
1818 const region *base_reg = reg_offset.get_base_region ();
1819
7a6564c9 1820 /* Bail out on symbolic regions.
7e3b45be
TL
1821 (e.g. because the analyzer did not see previous offsets on the latter,
1822 it might think that a negative access is before the buffer). */
7a6564c9 1823 if (base_reg->symbolic_p ())
7e3b45be 1824 return;
7a6564c9
TL
1825
1826 /* Find out how many bytes were accessed. */
1827 const svalue *num_bytes_sval = reg->get_byte_size_sval (m_mgr);
1828 tree num_bytes_tree = maybe_get_integer_cst_tree (num_bytes_sval);
1829
1830 /* Get the capacity of the buffer. */
1831 const svalue *capacity = get_capacity (base_reg);
1832 tree cst_capacity_tree = maybe_get_integer_cst_tree (capacity);
1833
7e3b45be
TL
1834 /* The constant offset from a pointer is represented internally as a sizetype
1835 but should be interpreted as a signed value here. The statement below
7a6564c9
TL
1836 converts the offset from bits to bytes and then to a signed integer with
1837 the same precision that sizetype has on the target system.
7e3b45be
TL
1838
1839 For example, this is needed for out-of-bounds-3.c test1 to pass when
1840 compiled with a 64-bit gcc build targeting 32-bit systems. */
7a6564c9
TL
1841 byte_offset_t offset;
1842 if (!reg_offset.symbolic_p ())
1843 offset = wi::sext (reg_offset.get_bit_offset () >> LOG2_BITS_PER_UNIT,
1844 TYPE_PRECISION (size_type_node));
1845
1846 /* If either the offset or the number of bytes accessed are symbolic,
1847 we have to reason about symbolic values. */
1848 if (reg_offset.symbolic_p () || !num_bytes_tree)
1849 {
1850 const svalue *byte_offset_sval;
1851 if (!reg_offset.symbolic_p ())
1852 {
1853 tree offset_tree = wide_int_to_tree (integer_type_node, offset);
1854 byte_offset_sval
1855 = m_mgr->get_or_create_constant_svalue (offset_tree);
1856 }
1857 else
1858 byte_offset_sval = reg_offset.get_symbolic_byte_offset ();
1859 check_symbolic_bounds (base_reg, byte_offset_sval, num_bytes_sval,
1860 capacity, dir, ctxt);
1861 return;
1862 }
7e3b45be 1863
7a6564c9 1864 /* Otherwise continue to check with concrete values. */
7e3b45be
TL
1865 byte_range out (0, 0);
1866 /* NUM_BYTES_TREE should always be interpreted as unsigned. */
7a6564c9
TL
1867 byte_offset_t num_bytes_unsigned = wi::to_offset (num_bytes_tree);
1868 byte_range read_bytes (offset, num_bytes_unsigned);
7e3b45be
TL
1869 /* If part of READ_BYTES lies below offset 0, we have an underflow. */
1870 if (read_bytes.falls_short_of_p (0, &out))
1871 {
7a6564c9 1872 tree diag_arg = get_representative_tree (base_reg);
7e3b45be
TL
1873 switch (dir)
1874 {
1875 default:
1876 gcc_unreachable ();
1877 break;
1878 case DIR_READ:
1879 ctxt->warn (new buffer_underread (reg, diag_arg, out));
1880 break;
1881 case DIR_WRITE:
1882 ctxt->warn (new buffer_underflow (reg, diag_arg, out));
1883 break;
1884 }
1885 }
1886
7a6564c9
TL
1887 /* For accesses past the end, we do need a concrete capacity. No need to
1888 do a symbolic check here because the inequality check cannot reason
1889 about whether a constant is greater than a symbolic value. */
1890 if (!cst_capacity_tree)
7e3b45be
TL
1891 return;
1892
1893 byte_range buffer (0, wi::to_offset (cst_capacity_tree));
1894 /* If READ_BYTES exceeds BUFFER, we do have an overflow. */
1895 if (read_bytes.exceeds_p (buffer, &out))
1896 {
1897 tree byte_bound = wide_int_to_tree (size_type_node,
1898 buffer.get_next_byte_offset ());
7a6564c9 1899 tree diag_arg = get_representative_tree (base_reg);
7e3b45be
TL
1900
1901 switch (dir)
1902 {
1903 default:
1904 gcc_unreachable ();
1905 break;
1906 case DIR_READ:
1907 ctxt->warn (new buffer_overread (reg, diag_arg, out, byte_bound));
1908 break;
1909 case DIR_WRITE:
1910 ctxt->warn (new buffer_overflow (reg, diag_arg, out, byte_bound));
1911 break;
1912 }
1913 }
1914}
1915
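/* Added illustration (hypothetical, not part of GCC) of the two concrete
   cases above, for "int arr[4];" (a 16-byte buffer):

       arr[-1] = 42;   bytes [-4, 0)  -> falls_short_of_p -> underflow
       arr[4] = 42;    bytes [16, 20) -> exceeds_p -> overflow,
                                         with byte_bound == 16

   "arr" is a made-up name for illustration.  */
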
9ff3e236
DM
1916/* Ensure that all arguments at the call described by CD are checked
1917 for poisoned values, by calling get_rvalue on each argument. */
1918
1919void
1920region_model::check_call_args (const call_details &cd) const
1921{
1922 for (unsigned arg_idx = 0; arg_idx < cd.num_args (); arg_idx++)
1923 cd.get_arg_svalue (arg_idx);
1924}
1925
aee1adf2
DM
1926/* Return true if CD is known to be a call to a function with
1927 __attribute__((const)). */
1928
1929static bool
1930const_fn_p (const call_details &cd)
1931{
1932 tree fndecl = cd.get_fndecl_for_call ();
1933 if (!fndecl)
1934 return false;
1935 gcc_assert (DECL_P (fndecl));
1936 return TREE_READONLY (fndecl);
1937}
1938
1939/* If this CD is known to be a call to a function with
1940 __attribute__((const)), attempt to get a const_fn_result_svalue
1941 based on the arguments, or return NULL otherwise. */
1942
1943static const svalue *
1944maybe_get_const_fn_result (const call_details &cd)
1945{
1946 if (!const_fn_p (cd))
1947 return NULL;
1948
1949 unsigned num_args = cd.num_args ();
1950 if (num_args > const_fn_result_svalue::MAX_INPUTS)
1951 /* Too many arguments. */
1952 return NULL;
1953
1954 auto_vec<const svalue *> inputs (num_args);
1955 for (unsigned arg_idx = 0; arg_idx < num_args; arg_idx++)
1956 {
1957 const svalue *arg_sval = cd.get_arg_svalue (arg_idx);
1958 if (!arg_sval->can_have_associated_state_p ())
1959 return NULL;
1960 inputs.quick_push (arg_sval);
1961 }
1962
1963 region_model_manager *mgr = cd.get_manager ();
1964 const svalue *sval
1965 = mgr->get_or_create_const_fn_result_svalue (cd.get_lhs_type (),
1966 cd.get_fndecl_for_call (),
1967 inputs);
1968 return sval;
1969}
1970
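/* Added sketch of the effect of the above (hypothetical code, not part
   of GCC):

       extern int hash (int) __attribute__ ((const));
       int a = hash (x);
       int b = hash (x);

   both calls map to the same const_fn_result_svalue keyed on the fndecl
   and inputs, so the analyzer can treat "a == b" as known; without the
   attribute each call would get a fresh conjured value.  */
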
07e30160
DM
1971/* Update this model for an outcome of a call that returns zero.
1972 If UNMERGEABLE, then make the result unmergeable, e.g. to prevent
1973 the state-merger code from merging success and failure outcomes. */
1974
1975void
1976region_model::update_for_zero_return (const call_details &cd,
1977 bool unmergeable)
1978{
1979 if (!cd.get_lhs_type ())
1980 return;
1981 const svalue *result
1982 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1983 if (unmergeable)
1984 result = m_mgr->get_or_create_unmergeable (result);
1985 set_value (cd.get_lhs_region (), result, cd.get_ctxt ());
1986}
1987
1988/* Update this model for an outcome of a call that returns non-zero. */
1989
1990void
1991region_model::update_for_nonzero_return (const call_details &cd)
1992{
1993 if (!cd.get_lhs_type ())
1994 return;
1995 const svalue *zero
1996 = m_mgr->get_or_create_int_cst (cd.get_lhs_type (), 0);
1997 const svalue *result
1998 = get_store_value (cd.get_lhs_region (), cd.get_ctxt ());
1999 add_constraint (result, NE_EXPR, zero, cd.get_ctxt ());
2000}
2001
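/* Usage sketch (an assumption about how callers combine the two helpers
   above, not code from this file): a handler for a function returning 0
   on success and nonzero on failure can clone the model, call
   update_for_zero_return (cd, true) on the success clone (the
   unmergeable result keeps the two outcomes from being merged back
   together) and update_for_nonzero_return (cd) on the failure clone.  */
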
2002/* Subroutine of region_model::maybe_get_copy_bounds.
2003 The Linux kernel commonly uses
2004 min_t([unsigned] long, VAR, sizeof(T));
2005 to set an upper bound on the size of a copy_to_user.
2006 Attempt to simplify such sizes by trying to get the upper bound as a
2007 constant.
2008 Return the simplified svalue if possible, or NULL otherwise. */
2009
2010static const svalue *
2011maybe_simplify_upper_bound (const svalue *num_bytes_sval,
2012 region_model_manager *mgr)
2013{
2014 tree type = num_bytes_sval->get_type ();
2015 while (const svalue *raw = num_bytes_sval->maybe_undo_cast ())
2016 num_bytes_sval = raw;
2017 if (const binop_svalue *binop_sval = num_bytes_sval->dyn_cast_binop_svalue ())
2018 if (binop_sval->get_op () == MIN_EXPR)
2019 if (binop_sval->get_arg1 ()->get_kind () == SK_CONSTANT)
2020 {
2021 /* TODO: we might want to also capture the constraint
2022 when recording the diagnostic, or note that we're using
2023 the upper bound. */
2024 return mgr->get_or_create_cast (type, binop_sval->get_arg1 ());
2025 }
2026 return NULL;
2027}
2028
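/* Added illustration (hypothetical kernel-style source) of the pattern
   recognized above:

       len = min_t (unsigned long, len, sizeof (buf));
       copy_to_user (dst, src, len);

   the conditional behind min_t is typically gimplified to
   MIN_EXPR <len, sizeof (buf)>, whose constant second argument is the
   upper bound returned here.  */
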
2029/* Attempt to get an upper bound for the size of a copy when simulating a
2030 copy function.
2031
2032 NUM_BYTES_SVAL is the symbolic value for the size of the copy.
2033 Use it if it's constant, otherwise try to simplify it. Failing
2034 that, use the size of SRC_REG if constant.
2035
2036 Return a symbolic value for an upper limit on the number of bytes
2037 copied, or NULL if no such value could be determined. */
2038
2039const svalue *
2040region_model::maybe_get_copy_bounds (const region *src_reg,
2041 const svalue *num_bytes_sval)
2042{
2043 if (num_bytes_sval->maybe_get_constant ())
2044 return num_bytes_sval;
2045
2046 if (const svalue *simplified
2047 = maybe_simplify_upper_bound (num_bytes_sval, m_mgr))
2048 num_bytes_sval = simplified;
2049
2050 if (num_bytes_sval->maybe_get_constant ())
2051 return num_bytes_sval;
2052
2053 /* For now, try just guessing the size as the capacity of the
2054 base region of the src.
2055 This is a hack; we might get too large a value. */
2056 const region *src_base_reg = src_reg->get_base_region ();
2057 num_bytes_sval = get_capacity (src_base_reg);
2058
2059 if (num_bytes_sval->maybe_get_constant ())
2060 return num_bytes_sval;
2061
2062 /* Non-constant: give up. */
2063 return NULL;
2064}
2065
2066/* Get any known_function for FNDECL, or NULL if there is none. */
2067
2068const known_function *
2069region_model::get_known_function (tree fndecl) const
2070{
2071 known_function_manager *known_fn_mgr = m_mgr->get_known_function_manager ();
2072 return known_fn_mgr->get_by_fndecl (fndecl);
2073}
2074
757bf1df
DM
2075/* Update this model for the CALL stmt, using CTXT to report any
2076 diagnostics - the first half.
2077
2078 Updates to the region_model that should be made *before* sm-states
2079 are updated are done here; other updates to the region_model are done
ef7827b0 2080 in region_model::on_call_post.
757bf1df 2081
ef7827b0
DM
2082 Return true if the function call has unknown side effects (it wasn't
2083 recognized and we don't have a body for it, or are unable to tell which
5ee4ba03
DM
2084 fndecl it is).
2085
2086 Write true to *OUT_TERMINATE_PATH if this execution path should be
2087 terminated (e.g. the function call terminates the process). */
ef7827b0
DM
2088
2089bool
5ee4ba03
DM
2090region_model::on_call_pre (const gcall *call, region_model_context *ctxt,
2091 bool *out_terminate_path)
757bf1df 2092{
48e8a7a6
DM
2093 call_details cd (call, this, ctxt);
2094
ef7827b0
DM
2095 bool unknown_side_effects = false;
2096
9b4eee5f
DM
2097 /* Special-case for IFN_DEFERRED_INIT.
2098 We want to report uninitialized variables with -fanalyzer (treating
2099 -ftrivial-auto-var-init= as purely a mitigation feature).
2100 Handle IFN_DEFERRED_INIT by treating it as no-op: don't touch the
2101 lhs of the call, so that it is still uninitialized from the point of
2102 view of the analyzer. */
2103 if (gimple_call_internal_p (call)
2104 && gimple_call_internal_fn (call) == IFN_DEFERRED_INIT)
2105 return false;
2106
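  /* For example (hypothetical), with -ftrivial-auto-var-init=zero,
       int x;
     is lowered to something like
       x = .DEFERRED_INIT (4, 2, &"x"[0]);
     by skipping the IFN_DEFERRED_INIT special-case above, a later read
     of "x" still looks uninitialized to the analyzer and can be
     diagnosed.  */
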
bddd8d86
DM
2107 /* Get svalues for all of the arguments at the callsite, to ensure that we
2108 complain about any uninitialized arguments. This might lead to
2109 duplicates if any of the handling below also looks up the svalues,
2110 but the deduplication code should deal with that. */
2111 if (ctxt)
ca123e01 2112 check_call_args (cd);
bddd8d86 2113
33255ad3
DM
2114 /* Some of the cases below update the lhs of the call based on the
2115 return value, but not all. Provide a default value, which may
2116 get overwritten below. */
2117 if (tree lhs = gimple_call_lhs (call))
2118 {
2119 const region *lhs_region = get_lvalue (lhs, ctxt);
aee1adf2
DM
2120 const svalue *sval = maybe_get_const_fn_result (cd);
2121 if (!sval)
2122 {
2123 /* For the common case of functions without __attribute__((const)),
2124 use a conjured value, and purge any prior state involving that
2125 value (in case this is in a loop). */
2126 sval = m_mgr->get_or_create_conjured_svalue (TREE_TYPE (lhs), call,
3734527d
DM
2127 lhs_region,
2128 conjured_purge (this,
2129 ctxt));
aee1adf2 2130 }
3a1d168e 2131 set_value (lhs_region, sval, ctxt);
33255ad3
DM
2132 }
2133
48e8a7a6 2134 if (gimple_call_internal_p (call))
757bf1df 2135 {
48e8a7a6
DM
2136 switch (gimple_call_internal_fn (call))
2137 {
2138 default:
2139 break;
2140 case IFN_BUILTIN_EXPECT:
b5081130
DM
2141 impl_call_builtin_expect (cd);
2142 return false;
37eb3ef4
DM
2143 case IFN_UBSAN_BOUNDS:
2144 return false;
2402dc6b
DM
2145 case IFN_VA_ARG:
2146 impl_call_va_arg (cd);
2147 return false;
48e8a7a6
DM
2148 }
2149 }
808f4dfe 2150
48e8a7a6
DM
2151 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
2152 {
808f4dfe
DM
2153 /* The various impl_call_* member functions are implemented
2154 in region-model-impl-calls.cc.
2155 Having them split out into separate functions makes it easier
2156 to put breakpoints on the handling of specific functions. */
5fbcbcaf 2157 int callee_fndecl_flags = flags_from_decl_or_type (callee_fndecl);
ee7bfbe5 2158
47997a32 2159 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
ee7bfbe5
DM
2160 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2161 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2162 {
2163 default:
5fbcbcaf 2164 if (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
b7028f06 2165 unknown_side_effects = true;
ee7bfbe5
DM
2166 break;
2167 case BUILT_IN_ALLOCA:
2168 case BUILT_IN_ALLOCA_WITH_ALIGN:
b5081130
DM
2169 impl_call_alloca (cd);
2170 return false;
ee7bfbe5 2171 case BUILT_IN_CALLOC:
b5081130
DM
2172 impl_call_calloc (cd);
2173 return false;
ee7bfbe5
DM
2174 case BUILT_IN_EXPECT:
2175 case BUILT_IN_EXPECT_WITH_PROBABILITY:
b5081130
DM
2176 impl_call_builtin_expect (cd);
2177 return false;
ee7bfbe5
DM
2178 case BUILT_IN_FREE:
2179 /* Handle in "on_call_post". */
2180 break;
2181 case BUILT_IN_MALLOC:
b5081130
DM
2182 impl_call_malloc (cd);
2183 return false;
b7028f06
DM
2184 case BUILT_IN_MEMCPY:
2185 case BUILT_IN_MEMCPY_CHK:
2186 impl_call_memcpy (cd);
2187 return false;
ee7bfbe5 2188 case BUILT_IN_MEMSET:
bc62bfb0 2189 case BUILT_IN_MEMSET_CHK:
ee7bfbe5
DM
2190 impl_call_memset (cd);
2191 return false;
a6baafca 2193 case BUILT_IN_REALLOC:
 /* Handle in "on_call_post". */
a6baafca 2194 return false;
111fd515
DM
2195 case BUILT_IN_STRCHR:
2196 impl_call_strchr (cd);
2197 return false;
b7028f06
DM
2198 case BUILT_IN_STRCPY:
2199 case BUILT_IN_STRCPY_CHK:
2200 impl_call_strcpy (cd);
2201 return false;
ee7bfbe5 2202 case BUILT_IN_STRLEN:
b5081130
DM
2203 impl_call_strlen (cd);
2204 return false;
b7028f06 2205
37eb3ef4
DM
2206 case BUILT_IN_STACK_SAVE:
2207 case BUILT_IN_STACK_RESTORE:
2208 return false;
2209
b7028f06
DM
2210 /* Stdio builtins. */
2211 case BUILT_IN_FPRINTF:
2212 case BUILT_IN_FPRINTF_UNLOCKED:
2213 case BUILT_IN_PUTC:
2214 case BUILT_IN_PUTC_UNLOCKED:
2215 case BUILT_IN_FPUTC:
2216 case BUILT_IN_FPUTC_UNLOCKED:
2217 case BUILT_IN_FPUTS:
2218 case BUILT_IN_FPUTS_UNLOCKED:
2219 case BUILT_IN_FWRITE:
2220 case BUILT_IN_FWRITE_UNLOCKED:
2221 case BUILT_IN_PRINTF:
2222 case BUILT_IN_PRINTF_UNLOCKED:
2223 case BUILT_IN_PUTCHAR:
2224 case BUILT_IN_PUTCHAR_UNLOCKED:
2225 case BUILT_IN_PUTS:
2226 case BUILT_IN_PUTS_UNLOCKED:
2227 case BUILT_IN_VFPRINTF:
2228 case BUILT_IN_VPRINTF:
2229 /* These stdio builtins have external effects that are out
2230 of scope for the analyzer: we only want to model the effects
2231 on the return value. */
2232 break;
2402dc6b
DM
2233
2234 case BUILT_IN_VA_START:
2235 impl_call_va_start (cd);
2236 return false;
2237 case BUILT_IN_VA_COPY:
2238 impl_call_va_copy (cd);
2239 return false;
ee7bfbe5 2240 }
ee7bfbe5 2241 else if (is_named_call_p (callee_fndecl, "malloc", call, 1))
b5081130
DM
2242 {
2243 impl_call_malloc (cd);
2244 return false;
2245 }
808f4dfe 2246 else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
b5081130
DM
2247 {
2248 impl_call_calloc (cd);
2249 return false;
2250 }
ee7bfbe5 2251 else if (is_named_call_p (callee_fndecl, "alloca", call, 1))
b5081130
DM
2252 {
2253 impl_call_alloca (cd);
2254 return false;
2255 }
a6baafca
DM
2256 else if (is_named_call_p (callee_fndecl, "realloc", call, 2))
2257 {
2258 impl_call_realloc (cd);
2259 return false;
2260 }
5ee4ba03
DM
2261 else if (is_named_call_p (callee_fndecl, "error"))
2262 {
2263 if (impl_call_error (cd, 3, out_terminate_path))
2264 return false;
2265 else
2266 unknown_side_effects = true;
2267 }
2268 else if (is_named_call_p (callee_fndecl, "error_at_line"))
2269 {
2270 if (impl_call_error (cd, 5, out_terminate_path))
2271 return false;
2272 else
2273 unknown_side_effects = true;
2274 }
33255ad3
DM
2275 else if (is_named_call_p (callee_fndecl, "fgets", call, 3)
2276 || is_named_call_p (callee_fndecl, "fgets_unlocked", call, 3))
2277 {
2278 impl_call_fgets (cd);
2279 return false;
2280 }
2281 else if (is_named_call_p (callee_fndecl, "fread", call, 4))
2282 {
2283 impl_call_fread (cd);
2284 return false;
2285 }
e097c9ab
DM
2286 else if (is_named_call_p (callee_fndecl, "getchar", call, 0))
2287 {
2288 /* No side-effects (tracking stream state is out-of-scope
2289 for the analyzer). */
2290 }
1e19ecd7
DM
2291 else if (is_named_call_p (callee_fndecl, "memset", call, 3)
2292 && POINTER_TYPE_P (cd.get_arg_type (0)))
e516294a 2293 {
808f4dfe 2294 impl_call_memset (cd);
e516294a 2295 return false;
872693ee
DM
2296 }
2297 else if (is_named_call_p (callee_fndecl, "putenv", call, 1)
2298 && POINTER_TYPE_P (cd.get_arg_type (0)))
2299 {
2300 impl_call_putenv (cd);
2301 return false;
e516294a 2302 }
111fd515
DM
2303 else if (is_named_call_p (callee_fndecl, "strchr", call, 2)
2304 && POINTER_TYPE_P (cd.get_arg_type (0)))
2305 {
2306 impl_call_strchr (cd);
2307 return false;
2308 }
1e19ecd7
DM
2309 else if (is_named_call_p (callee_fndecl, "strlen", call, 1)
2310 && POINTER_TYPE_P (cd.get_arg_type (0)))
757bf1df 2311 {
b5081130
DM
2312 impl_call_strlen (cd);
2313 return false;
757bf1df 2314 }
1690a839 2315 else if (is_named_call_p (callee_fndecl, "operator new", call, 1))
b5081130
DM
2316 {
2317 impl_call_operator_new (cd);
2318 return false;
2319 }
1690a839 2320 else if (is_named_call_p (callee_fndecl, "operator new []", call, 1))
b5081130
DM
2321 {
2322 impl_call_operator_new (cd);
2323 return false;
2324 }
1690a839
DM
2325 else if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2326 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2327 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2328 {
2329 /* Handle in "on_call_post". */
2330 }
07e30160
DM
2331 else if (const known_function *kf = get_known_function (callee_fndecl))
2332 {
2333 kf->impl_call_pre (cd);
2334 return false;
2335 }
ef7827b0 2336 else if (!fndecl_has_gimple_body_p (callee_fndecl)
5fbcbcaf 2337 && (!(callee_fndecl_flags & (ECF_CONST | ECF_PURE)))
808f4dfe 2338 && !fndecl_built_in_p (callee_fndecl))
ef7827b0 2339 unknown_side_effects = true;
757bf1df 2340 }
ef7827b0
DM
2341 else
2342 unknown_side_effects = true;
757bf1df 2343
ef7827b0 2344 return unknown_side_effects;
757bf1df
DM
2345}
2346
2347/* Update this model for the CALL stmt, using CTXT to report any
2348 diagnostics - the second half.
2349
2350 Updates to the region_model that should be made *after* sm-states
2351 are updated are done here; other updates to the region_model are done
ef7827b0
DM
2352 in region_model::on_call_pre.
2353
2354 If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
2355 to purge state. */
757bf1df
DM
2356
2357void
ef7827b0
DM
2358region_model::on_call_post (const gcall *call,
2359 bool unknown_side_effects,
2360 region_model_context *ctxt)
757bf1df 2361{
757bf1df 2362 if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
1690a839 2363 {
eafa9d96 2364 call_details cd (call, this, ctxt);
1690a839
DM
2365 if (is_named_call_p (callee_fndecl, "free", call, 1))
2366 {
1690a839
DM
2367 impl_call_free (cd);
2368 return;
2369 }
2370 if (is_named_call_p (callee_fndecl, "operator delete", call, 1)
2371 || is_named_call_p (callee_fndecl, "operator delete", call, 2)
2372 || is_named_call_p (callee_fndecl, "operator delete []", call, 1))
2373 {
1690a839
DM
2374 impl_call_operator_delete (cd);
2375 return;
2376 }
c7e276b8
DM
2377 /* Was this fndecl referenced by
2378 __attribute__((malloc(FOO)))? */
2379 if (lookup_attribute ("*dealloc", DECL_ATTRIBUTES (callee_fndecl)))
2380 {
c7e276b8
DM
2381 impl_deallocation_call (cd);
2382 return;
2383 }
eafa9d96
DM
2384 if (fndecl_built_in_p (callee_fndecl, BUILT_IN_NORMAL)
2385 && gimple_builtin_call_types_compatible_p (call, callee_fndecl))
2386 switch (DECL_UNCHECKED_FUNCTION_CODE (callee_fndecl))
2387 {
2388 default:
2389 break;
2390 case BUILT_IN_REALLOC:
2391 impl_call_realloc (cd);
2392 return;
2402dc6b
DM
2393
2394 case BUILT_IN_VA_END:
2395 impl_call_va_end (cd);
2396 return;
eafa9d96 2397 }
1690a839 2398 }
ef7827b0
DM
2399
2400 if (unknown_side_effects)
2401 handle_unrecognized_call (call, ctxt);
2402}
2403
33255ad3
DM
2404/* Purge state involving SVAL from this region_model, using CTXT
2405 (if non-NULL) to purge other state in a program_state.
2406
2407 For example, if we're at the def-stmt of an SSA name, then we need to
2408 purge any state for svalues that involve that SSA name. This avoids
2409 false positives in loops, since a symbolic value referring to the
2410 SSA name will be referring to the previous value of that SSA name.
2411
2412 For example, in:
2413 while ((e = hashmap_iter_next(&iter))) {
2414 struct oid2strbuf *e_strbuf = (struct oid2strbuf *)e;
2415 free (e_strbuf->value);
2416 }
2417 at the def-stmt of e_8:
2418 e_8 = hashmap_iter_next (&iter);
2419 we should purge the "freed" state of:
2420 INIT_VAL(CAST_REG(‘struct oid2strbuf’, (*INIT_VAL(e_8))).value)
2421 which is the "e_strbuf->value" value from the previous iteration,
2422 or we will erroneously report a double-free - the "e_8" within it
2423 refers to the previous value. */
2424
2425void
2426region_model::purge_state_involving (const svalue *sval,
2427 region_model_context *ctxt)
2428{
a113b143
DM
2429 if (!sval->can_have_associated_state_p ())
2430 return;
33255ad3
DM
2431 m_store.purge_state_involving (sval, m_mgr);
2432 m_constraints->purge_state_involving (sval);
2433 m_dynamic_extents.purge_state_involving (sval);
2434 if (ctxt)
2435 ctxt->purge_state_involving (sval);
2436}
2437
c65d3c7f
DM
2438/* A pending_note subclass for adding a note about an
2439 __attribute__((access, ...)) to a diagnostic. */
2440
2441class reason_attr_access : public pending_note_subclass<reason_attr_access>
2442{
2443public:
2444 reason_attr_access (tree callee_fndecl, const attr_access &access)
2445 : m_callee_fndecl (callee_fndecl),
2446 m_ptr_argno (access.ptrarg),
2447 m_access_str (TREE_STRING_POINTER (access.to_external_string ()))
2448 {
2449 }
2450
ff171cb1 2451 const char *get_kind () const final override { return "reason_attr_access"; }
c65d3c7f 2452
2ac1459f 2453 void emit () const final override
c65d3c7f
DM
2454 {
2455 inform (DECL_SOURCE_LOCATION (m_callee_fndecl),
2456 "parameter %i of %qD marked with attribute %qs",
2457 m_ptr_argno + 1, m_callee_fndecl, m_access_str);
2458 }
2459
2460 bool operator== (const reason_attr_access &other) const
2461 {
2462 return (m_callee_fndecl == other.m_callee_fndecl
2463 && m_ptr_argno == other.m_ptr_argno
2464 && !strcmp (m_access_str, other.m_access_str));
2465 }
2466
2467private:
2468 tree m_callee_fndecl;
2469 unsigned m_ptr_argno;
2470 const char *m_access_str;
2471};
2472
b6eaf90c
DM
2473/* Check CALL, a call to external function CALLEE_FNDECL, based on
2474 any __attribute__ ((access, ...)) on the latter, complaining to
2475 CTXT about any issues.
2476
2477 Currently we merely call check_region_for_write on any regions
2478 pointed to by arguments marked with a "write_only" or "read_write"
2479 attribute. */
2480
2481void
2482region_model::
2483check_external_function_for_access_attr (const gcall *call,
2484 tree callee_fndecl,
2485 region_model_context *ctxt) const
2486{
2487 gcc_assert (call);
2488 gcc_assert (callee_fndecl);
2489 gcc_assert (ctxt);
2490
2491 tree fntype = TREE_TYPE (callee_fndecl);
2492 if (!fntype)
2493 return;
2494
2495 if (!TYPE_ATTRIBUTES (fntype))
2496 return;
2497
2498 /* Initialize a map of attribute access specifications for arguments
2499 to the function call. */
2500 rdwr_map rdwr_idx;
2501 init_attr_rdwr_indices (&rdwr_idx, TYPE_ATTRIBUTES (fntype));
2502
2503 unsigned argno = 0;
2504
2505 for (tree iter = TYPE_ARG_TYPES (fntype); iter;
2506 iter = TREE_CHAIN (iter), ++argno)
2507 {
2508 const attr_access* access = rdwr_idx.get (argno);
2509 if (!access)
2510 continue;
2511
2512 /* Ignore any duplicate entry in the map for the size argument. */
2513 if (access->ptrarg != argno)
2514 continue;
2515
2516 if (access->mode == access_write_only
2517 || access->mode == access_read_write)
2518 {
c65d3c7f
DM
2519 /* Subclass of decorated_region_model_context that
2520 adds a note about the attr access to any saved diagnostics. */
2521 class annotating_ctxt : public note_adding_context
2522 {
2523 public:
2524 annotating_ctxt (tree callee_fndecl,
2525 const attr_access &access,
2526 region_model_context *ctxt)
2527 : note_adding_context (ctxt),
2528 m_callee_fndecl (callee_fndecl),
2529 m_access (access)
2530 {
2531 }
ff171cb1 2532 pending_note *make_note () final override
c65d3c7f
DM
2533 {
2534 return new reason_attr_access (m_callee_fndecl, m_access);
2535 }
2536 private:
2537 tree m_callee_fndecl;
2538 const attr_access &m_access;
2539 };
2540
2541 /* Use this ctxt below so that any diagnostics get the
2542 note added to them. */
2543 annotating_ctxt my_ctxt (callee_fndecl, *access, ctxt);
2544
b6eaf90c 2545 tree ptr_tree = gimple_call_arg (call, access->ptrarg);
c65d3c7f
DM
2546 const svalue *ptr_sval = get_rvalue (ptr_tree, &my_ctxt);
2547 const region *reg = deref_rvalue (ptr_sval, ptr_tree, &my_ctxt);
2548 check_region_for_write (reg, &my_ctxt);
b6eaf90c
DM
2549 /* We don't use the size arg for now. */
2550 }
2551 }
2552}
2553
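/* Added illustration (hypothetical declaration) of what the check above
   handles:

       void fill (char *buf, size_t n)
         __attribute__ ((access (write_only, 1, 2)));

   the region pointed to by the first argument of a call to fill() is
   checked as a write destination (e.g. complaining if it points to a
   string literal), and reason_attr_access adds a note that parameter 1
   of fill() is marked "access (write_only, 1, 2)".  */
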
ef7827b0
DM
2554/* Handle a call CALL to a function with unknown behavior.
2555
2556 Traverse the regions in this model, determining what regions are
2557 reachable from pointer arguments to CALL and from global variables,
2558 recursively.
2559
2560 Set all reachable regions to new unknown values and purge sm-state
2561 from their values, and from values that point to them. */
2562
2563void
2564region_model::handle_unrecognized_call (const gcall *call,
2565 region_model_context *ctxt)
2566{
2567 tree fndecl = get_fndecl_for_call (call, ctxt);
2568
b6eaf90c
DM
2569 if (fndecl && ctxt)
2570 check_external_function_for_access_attr (call, fndecl, ctxt);
2571
c710051a 2572 reachable_regions reachable_regs (this);
ef7827b0
DM
2573
2574 /* Determine the reachable regions and their mutability. */
2575 {
808f4dfe
DM
2576 /* Add globals and regions that already escaped in previous
2577 unknown calls. */
2578 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2579 &reachable_regs);
ef7827b0
DM
2580
2581 /* Params that are pointers. */
2582 tree iter_param_types = NULL_TREE;
2583 if (fndecl)
2584 iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2585 for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
2586 {
2587 /* Track expected param type, where available. */
2588 tree param_type = NULL_TREE;
2589 if (iter_param_types)
2590 {
2591 param_type = TREE_VALUE (iter_param_types);
2592 gcc_assert (param_type);
2593 iter_param_types = TREE_CHAIN (iter_param_types);
2594 }
2595
2596 tree parm = gimple_call_arg (call, arg_idx);
808f4dfe
DM
2597 const svalue *parm_sval = get_rvalue (parm, ctxt);
2598 reachable_regs.handle_parm (parm_sval, param_type);
ef7827b0
DM
2599 }
2600 }
2601
33255ad3 2602 uncertainty_t *uncertainty = ctxt ? ctxt->get_uncertainty () : NULL;
3a66c289 2603
808f4dfe
DM
2604 /* Purge sm-state for the svalues that were reachable,
2605 both in non-mutable and mutable form. */
2606 for (svalue_set::iterator iter
2607 = reachable_regs.begin_reachable_svals ();
2608 iter != reachable_regs.end_reachable_svals (); ++iter)
ef7827b0 2609 {
808f4dfe 2610 const svalue *sval = (*iter);
33255ad3
DM
2611 if (ctxt)
2612 ctxt->on_unknown_change (sval, false);
808f4dfe
DM
2613 }
2614 for (svalue_set::iterator iter
2615 = reachable_regs.begin_mutable_svals ();
2616 iter != reachable_regs.end_mutable_svals (); ++iter)
2617 {
2618 const svalue *sval = (*iter);
33255ad3
DM
2619 if (ctxt)
2620 ctxt->on_unknown_change (sval, true);
3a66c289
DM
2621 if (uncertainty)
2622 uncertainty->on_mutable_sval_at_unknown_call (sval);
808f4dfe 2623 }
ef7827b0 2624
808f4dfe 2625 /* Mark any clusters that have escaped. */
af66094d 2626 reachable_regs.mark_escaped_clusters (ctxt);
ef7827b0 2627
808f4dfe
DM
2628 /* Update bindings for all clusters that have escaped, whether above,
2629 or previously. */
3734527d
DM
2630 m_store.on_unknown_fncall (call, m_mgr->get_store_manager (),
2631 conjured_purge (this, ctxt));
9a2c9579
DM
2632
2633 /* Purge dynamic extents from any regions that have escaped mutably:
2634 realloc could have been called on them. */
2635 for (hash_set<const region *>::iterator
2636 iter = reachable_regs.begin_mutable_base_regs ();
2637 iter != reachable_regs.end_mutable_base_regs ();
2638 ++iter)
2639 {
2640 const region *base_reg = (*iter);
2641 unset_dynamic_extents (base_reg);
2642 }
808f4dfe 2643}
ef7827b0 2644
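/* For example (hypothetical):

       static int g;
       int *p = &g;
       unknown_fn (p);

   "g" is reachable from an argument, so its cluster is marked escaped
   and rebound to a conjured value: unknown_fn may have stashed "p"
   somewhere and could write through it at any later unknown call.  */
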
808f4dfe
DM
2645/* Traverse the regions in this model, determining what regions are
2646 reachable from the store and populating *OUT.
ef7827b0 2647
808f4dfe
DM
2648 If EXTRA_SVAL is non-NULL, treat it as an additional "root"
2649 for reachability (for handling return values from functions when
2650 analyzing return of the only function on the stack).
2651
3a66c289
DM
2652 If UNCERTAINTY is non-NULL, treat any svalues that were recorded
2653 within it as being maybe-bound as additional "roots" for reachability.
2654
808f4dfe
DM
2655 Find svalues that haven't leaked. */
2656
2657void
2658region_model::get_reachable_svalues (svalue_set *out,
3a66c289
DM
2659 const svalue *extra_sval,
2660 const uncertainty_t *uncertainty)
808f4dfe 2661{
c710051a 2662 reachable_regions reachable_regs (this);
808f4dfe
DM
2663
2664 /* Add globals and regions that already escaped in previous
2665 unknown calls. */
2666 m_store.for_each_cluster (reachable_regions::init_cluster_cb,
2667 &reachable_regs);
2668
2669 if (extra_sval)
2670 reachable_regs.handle_sval (extra_sval);
ef7827b0 2671
3a66c289
DM
2672 if (uncertainty)
2673 for (uncertainty_t::iterator iter
2674 = uncertainty->begin_maybe_bound_svals ();
2675 iter != uncertainty->end_maybe_bound_svals (); ++iter)
2676 reachable_regs.handle_sval (*iter);
2677
808f4dfe
DM
2678 /* Get regions for locals that have explicitly bound values. */
2679 for (store::cluster_map_t::iterator iter = m_store.begin ();
2680 iter != m_store.end (); ++iter)
2681 {
2682 const region *base_reg = (*iter).first;
2683 if (const region *parent = base_reg->get_parent_region ())
2684 if (parent->get_kind () == RK_FRAME)
2685 reachable_regs.add (base_reg, false);
2686 }
2687
2688 /* Populate *OUT based on the values that were reachable. */
2689 for (svalue_set::iterator iter
2690 = reachable_regs.begin_reachable_svals ();
2691 iter != reachable_regs.end_reachable_svals (); ++iter)
2692 out->add (*iter);
757bf1df
DM
2693}
2694
2695/* Update this model for the RETURN_STMT, using CTXT to report any
2696 diagnostics. */
2697
2698void
2699region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
2700{
2701 tree callee = get_current_function ()->decl;
2702 tree lhs = DECL_RESULT (callee);
2703 tree rhs = gimple_return_retval (return_stmt);
2704
2705 if (lhs && rhs)
13ad6d9f
DM
2706 {
2707 const svalue *sval = get_rvalue (rhs, ctxt);
2708 const region *ret_reg = get_lvalue (lhs, ctxt);
2709 set_value (ret_reg, sval, ctxt);
2710 }
757bf1df
DM
2711}
2712
342e14ff
DM
2713/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
2714 ENODE, using CTXT to report any diagnostics.
757bf1df 2715
342e14ff
DM
2716 This is for the initial direct invocation of setjmp/sigsetjmp (which returns
2717 0), as opposed to any second return due to longjmp/siglongjmp. */
757bf1df
DM
2718
2719void
2720region_model::on_setjmp (const gcall *call, const exploded_node *enode,
2721 region_model_context *ctxt)
2722{
808f4dfe
DM
2723 const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
2724 const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
2725 ctxt);
757bf1df 2726
808f4dfe
DM
2727 /* Create a setjmp_svalue for this call and store it in BUF_REG's
2728 region. */
2729 if (buf_reg)
757bf1df 2730 {
fd9982bb 2731 setjmp_record r (enode, call);
808f4dfe
DM
2732 const svalue *sval
2733 = m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
2734 set_value (buf_reg, sval, ctxt);
757bf1df
DM
2735 }
2736
2737 /* Direct calls to setjmp return 0. */
2738 if (tree lhs = gimple_call_lhs (call))
2739 {
1aff29d4
DM
2740 const svalue *new_sval
2741 = m_mgr->get_or_create_int_cst (TREE_TYPE (lhs), 0);
808f4dfe
DM
2742 const region *lhs_reg = get_lvalue (lhs, ctxt);
2743 set_value (lhs_reg, new_sval, ctxt);
757bf1df
DM
2744 }
2745}
2746
2747/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
2748 to a "setjmp" at SETJMP_CALL where the final stack depth should be
808f4dfe
DM
2749 SETJMP_STACK_DEPTH. Pop any stack frames. Leak detection is *not*
2750 done, and should be done by the caller. */
757bf1df
DM
2751
2752void
2753region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
808f4dfe 2754 int setjmp_stack_depth, region_model_context *ctxt)
757bf1df
DM
2755{
2756 /* Evaluate the val, using the frame of the "longjmp". */
2757 tree fake_retval = gimple_call_arg (longjmp_call, 1);
808f4dfe 2758 const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);
757bf1df
DM
2759
2760 /* Pop any frames until we reach the stack depth of the function where
2761 setjmp was called. */
2762 gcc_assert (get_stack_depth () >= setjmp_stack_depth);
2763 while (get_stack_depth () > setjmp_stack_depth)
808f4dfe 2764 pop_frame (NULL, NULL, ctxt);
757bf1df
DM
2765
2766 gcc_assert (get_stack_depth () == setjmp_stack_depth);
2767
2768 /* Assign to LHS of "setjmp" in new_state. */
2769 if (tree lhs = gimple_call_lhs (setjmp_call))
2770 {
2771 /* Passing 0 as the val to longjmp leads to setjmp returning 1. */
1aff29d4
DM
2772 const svalue *zero_sval
2773 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 0);
808f4dfe 2774 tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
757bf1df
DM
2775 /* If we have 0, use 1. */
2776 if (eq_zero.is_true ())
2777 {
808f4dfe 2778 const svalue *one_sval
1aff29d4 2779 = m_mgr->get_or_create_int_cst (TREE_TYPE (fake_retval), 1);
808f4dfe 2780 fake_retval_sval = one_sval;
757bf1df
DM
2781 }
2782 else
2783 {
2784 /* Otherwise note that the value is nonzero. */
808f4dfe 2785 m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
757bf1df
DM
2786 }
2787
808f4dfe
DM
2788 /* Decorate the return value from setjmp as being unmergeable,
2789 so that we don't attempt to merge states with it as zero
2790 with states in which it's nonzero, leading to a clean distinction
2791 in the exploded_graph between the first return and the second
2792 return. */
2793 fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);
757bf1df 2794
808f4dfe
DM
2795 const region *lhs_reg = get_lvalue (lhs, ctxt);
2796 set_value (lhs_reg, fake_retval_sval, ctxt);
2797 }
757bf1df
DM
2798}
2799
2800/* Update this region_model for a phi stmt of the form
2801 LHS = PHI <...RHS...>.
e0a7a675
DM
2802 where RHS is for the appropriate edge.
2803 Get state from OLD_STATE so that all of the phi stmts for a basic block
2804 are effectively handled simultaneously. */
757bf1df
DM
2805
2806void
8525d1f5 2807region_model::handle_phi (const gphi *phi,
808f4dfe 2808 tree lhs, tree rhs,
e0a7a675 2809 const region_model &old_state,
757bf1df
DM
2810 region_model_context *ctxt)
2811{
2812 /* For now, don't bother tracking the .MEM SSA names. */
2813 if (tree var = SSA_NAME_VAR (lhs))
2814 if (TREE_CODE (var) == VAR_DECL)
2815 if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
2816 return;
2817
e0a7a675
DM
2818 const svalue *src_sval = old_state.get_rvalue (rhs, ctxt);
2819 const region *dst_reg = old_state.get_lvalue (lhs, ctxt);
757bf1df 2820
e0a7a675 2821 set_value (dst_reg, src_sval, ctxt);
8525d1f5
DM
2822
2823 if (ctxt)
2824 ctxt->on_phi (phi, rhs);
757bf1df
DM
2825}
2826
2827/* Implementation of region_model::get_lvalue; the latter adds type-checking.
2828
2829 Get the region for PV within this region_model,
2830 emitting any diagnostics to CTXT. */
2831
808f4dfe 2832const region *
53cb324c 2833region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2834{
2835 tree expr = pv.m_tree;
2836
2837 gcc_assert (expr);
2838
2839 switch (TREE_CODE (expr))
2840 {
2841 default:
808f4dfe
DM
2842 return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
2843 dump_location_t ());
757bf1df
DM
2844
2845 case ARRAY_REF:
2846 {
2847 tree array = TREE_OPERAND (expr, 0);
2848 tree index = TREE_OPERAND (expr, 1);
757bf1df 2849
808f4dfe
DM
2850 const region *array_reg = get_lvalue (array, ctxt);
2851 const svalue *index_sval = get_rvalue (index, ctxt);
2852 return m_mgr->get_element_region (array_reg,
2853 TREE_TYPE (TREE_TYPE (array)),
2854 index_sval);
757bf1df
DM
2855 }
2856 break;
2857
93e759fc
DM
2858 case BIT_FIELD_REF:
2859 {
2860 tree inner_expr = TREE_OPERAND (expr, 0);
2861 const region *inner_reg = get_lvalue (inner_expr, ctxt);
2862 tree num_bits = TREE_OPERAND (expr, 1);
2863 tree first_bit_offset = TREE_OPERAND (expr, 2);
2864 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
2865 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
2866 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
2867 TREE_INT_CST_LOW (num_bits));
2868 return m_mgr->get_bit_range (inner_reg, TREE_TYPE (expr), bits);
2869 }
2870 break;
2871
757bf1df
DM
2872 case MEM_REF:
2873 {
2874 tree ptr = TREE_OPERAND (expr, 0);
2875 tree offset = TREE_OPERAND (expr, 1);
808f4dfe
DM
2876 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
2877 const svalue *offset_sval = get_rvalue (offset, ctxt);
2878 const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
2879 return m_mgr->get_offset_region (star_ptr,
2880 TREE_TYPE (expr),
2881 offset_sval);
757bf1df
DM
2882 }
2883 break;
2884
808f4dfe
DM
2885 case FUNCTION_DECL:
2886 return m_mgr->get_region_for_fndecl (expr);
2887
2888 case LABEL_DECL:
2889 return m_mgr->get_region_for_label (expr);
2890
757bf1df
DM
2891 case VAR_DECL:
2892 /* Handle globals. */
2893 if (is_global_var (expr))
808f4dfe 2894 return m_mgr->get_region_for_global (expr);
757bf1df
DM
2895
2896 /* Fall through. */
2897
2898 case SSA_NAME:
2899 case PARM_DECL:
2900 case RESULT_DECL:
2901 {
2902 gcc_assert (TREE_CODE (expr) == SSA_NAME
2903 || TREE_CODE (expr) == PARM_DECL
2904 || TREE_CODE (expr) == VAR_DECL
2905 || TREE_CODE (expr) == RESULT_DECL);
2906
808f4dfe
DM
2907 int stack_index = pv.m_stack_depth;
2908 const frame_region *frame = get_frame_at_index (stack_index);
757bf1df 2909 gcc_assert (frame);
4cebae09 2910 return frame->get_region_for_local (m_mgr, expr, ctxt);
757bf1df
DM
2911 }
2912
2913 case COMPONENT_REF:
2914 {
2915 /* obj.field */
2916 tree obj = TREE_OPERAND (expr, 0);
2917 tree field = TREE_OPERAND (expr, 1);
808f4dfe
DM
2918 const region *obj_reg = get_lvalue (obj, ctxt);
2919 return m_mgr->get_field_region (obj_reg, field);
41a9e940
DM
2920 }
2921 break;
2922
757bf1df 2923 case STRING_CST:
808f4dfe 2924 return m_mgr->get_region_for_string (expr);
757bf1df
DM
2925 }
2926}
2927
2928/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
2929
09bea584
DM
2930static void
2931assert_compat_types (tree src_type, tree dst_type)
2932{
2933 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
2934 {
2935#if CHECKING_P
2936 if (!(useless_type_conversion_p (src_type, dst_type)))
2937 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
2938#endif
2939 }
09bea584 2940}
757bf1df 2941
ea4e3218
DM
2942/* Return true if SRC_TYPE can be converted to DST_TYPE as a no-op. */
2943
e66b9f67 2944bool
ea4e3218
DM
2945compat_types_p (tree src_type, tree dst_type)
2946{
2947 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
2948 if (!(useless_type_conversion_p (src_type, dst_type)))
2949 return false;
2950 return true;
2951}
2952
808f4dfe 2953/* Get the region for PV within this region_model,
757bf1df
DM
2954 emitting any diagnostics to CTXT. */
2955
808f4dfe 2956const region *
53cb324c 2957region_model::get_lvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2958{
2959 if (pv.m_tree == NULL_TREE)
808f4dfe 2960 return NULL;
757bf1df 2961
808f4dfe
DM
2962 const region *result_reg = get_lvalue_1 (pv, ctxt);
2963 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
2964 return result_reg;
757bf1df
DM
2965}
2966
808f4dfe 2967/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
2968 recent stack frame if it's a local). */
2969
808f4dfe 2970const region *
53cb324c 2971region_model::get_lvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
2972{
2973 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
2974}
2975
2976/* Implementation of region_model::get_rvalue; the latter adds type-checking.
2977
2978 Get the value of PV within this region_model,
2979 emitting any diagnostics to CTXT. */
2980
808f4dfe 2981const svalue *
53cb324c 2982region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt) const
757bf1df
DM
2983{
2984 gcc_assert (pv.m_tree);
2985
2986 switch (TREE_CODE (pv.m_tree))
2987 {
2988 default:
2242b975 2989 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
757bf1df
DM
2990
2991 case ADDR_EXPR:
2992 {
2993 /* "&EXPR". */
2994 tree expr = pv.m_tree;
2995 tree op0 = TREE_OPERAND (expr, 0);
808f4dfe
DM
2996 const region *expr_reg = get_lvalue (op0, ctxt);
2997 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
757bf1df
DM
2998 }
2999 break;
3000
808f4dfe 3001 case BIT_FIELD_REF:
d3b1ef7a
DM
3002 {
3003 tree expr = pv.m_tree;
3004 tree op0 = TREE_OPERAND (expr, 0);
3005 const region *reg = get_lvalue (op0, ctxt);
3006 tree num_bits = TREE_OPERAND (expr, 1);
3007 tree first_bit_offset = TREE_OPERAND (expr, 2);
3008 gcc_assert (TREE_CODE (num_bits) == INTEGER_CST);
3009 gcc_assert (TREE_CODE (first_bit_offset) == INTEGER_CST);
3010 bit_range bits (TREE_INT_CST_LOW (first_bit_offset),
3011 TREE_INT_CST_LOW (num_bits));
9faf8348 3012 return get_rvalue_for_bits (TREE_TYPE (expr), reg, bits, ctxt);
d3b1ef7a 3013 }
808f4dfe
DM
3014
3015 case SSA_NAME:
3016 case VAR_DECL:
3017 case PARM_DECL:
3018 case RESULT_DECL:
757bf1df
DM
3019 case ARRAY_REF:
3020 {
da7c2773 3021 const region *reg = get_lvalue (pv, ctxt);
9faf8348 3022 return get_store_value (reg, ctxt);
757bf1df
DM
3023 }
3024
808f4dfe
DM
3025 case REALPART_EXPR:
3026 case IMAGPART_EXPR:
3027 case VIEW_CONVERT_EXPR:
3028 {
3029 tree expr = pv.m_tree;
3030 tree arg = TREE_OPERAND (expr, 0);
3031 const svalue *arg_sval = get_rvalue (arg, ctxt);
3032 const svalue *sval_unaryop
3033 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
3034 arg_sval);
3035 return sval_unaryop;
3036 }
3037
757bf1df
DM
3038 case INTEGER_CST:
3039 case REAL_CST:
808f4dfe
DM
3040 case COMPLEX_CST:
3041 case VECTOR_CST:
757bf1df 3042 case STRING_CST:
808f4dfe
DM
3043 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
3044
3045 case POINTER_PLUS_EXPR:
3046 {
3047 tree expr = pv.m_tree;
3048 tree ptr = TREE_OPERAND (expr, 0);
3049 tree offset = TREE_OPERAND (expr, 1);
3050 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
3051 const svalue *offset_sval = get_rvalue (offset, ctxt);
3052 const svalue *sval_binop
3053 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
3054 ptr_sval, offset_sval);
3055 return sval_binop;
3056 }
3057
3058 /* Binary ops. */
3059 case PLUS_EXPR:
3060 case MULT_EXPR:
3061 {
3062 tree expr = pv.m_tree;
3063 tree arg0 = TREE_OPERAND (expr, 0);
3064 tree arg1 = TREE_OPERAND (expr, 1);
3065 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
3066 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
3067 const svalue *sval_binop
3068 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
3069 arg0_sval, arg1_sval);
3070 return sval_binop;
3071 }
757bf1df
DM
3072
3073 case COMPONENT_REF:
3074 case MEM_REF:
757bf1df 3075 {
808f4dfe 3076 const region *ref_reg = get_lvalue (pv, ctxt);
9faf8348 3077 return get_store_value (ref_reg, ctxt);
757bf1df 3078 }
1b342485
AS
3079 case OBJ_TYPE_REF:
3080 {
3081 tree expr = OBJ_TYPE_REF_EXPR (pv.m_tree);
3082 return get_rvalue (expr, ctxt);
3083 }
757bf1df
DM
3084 }
3085}
3086
3087/* Get the value of PV within this region_model,
3088 emitting any diagnostics to CTXT. */
3089
808f4dfe 3090const svalue *
53cb324c 3091region_model::get_rvalue (path_var pv, region_model_context *ctxt) const
757bf1df
DM
3092{
3093 if (pv.m_tree == NULL_TREE)
808f4dfe 3094 return NULL;
757bf1df 3095
808f4dfe 3096 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 3097
808f4dfe
DM
3098 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
3099
33255ad3
DM
3100 result_sval = check_for_poison (result_sval, pv.m_tree, ctxt);
3101
808f4dfe 3102 return result_sval;
757bf1df
DM
3103}
3104
3105/* Get the value of EXPR within this region_model (assuming the most
3106 recent stack frame if it's a local). */
3107
808f4dfe 3108const svalue *
53cb324c 3109region_model::get_rvalue (tree expr, region_model_context *ctxt) const
757bf1df
DM
3110{
3111 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
3112}
3113
623bc027
DM
3114/* Return true if this model is on a path with "main" as the entrypoint
3115 (as opposed to one in which we're merely analyzing a subset of the
3116 path through the code). */
3117
3118bool
3119region_model::called_from_main_p () const
3120{
3121 if (!m_current_frame)
3122 return false;
3123 /* Determine if the oldest stack frame in this model is for "main". */
3124 const frame_region *frame0 = get_frame_at_index (0);
3125 gcc_assert (frame0);
3126 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
3127}
3128
3129/* Subroutine of region_model::get_store_value for when REG is (or is within)
3130 a global variable that hasn't been touched since the start of this path
3131 (or was implicitly touched due to a call to an unknown function). */
3132
3133const svalue *
3134region_model::get_initial_value_for_global (const region *reg) const
3135{
3136 /* Get the decl that REG is for (or is within). */
3137 const decl_region *base_reg
3138 = reg->get_base_region ()->dyn_cast_decl_region ();
3139 gcc_assert (base_reg);
3140 tree decl = base_reg->get_decl ();
3141
3142 /* Special-case: to avoid having to explicitly update all previously
3143 untracked globals when calling an unknown fn, they implicitly have
3144 an unknown value if an unknown call has occurred, unless this is
3145 static to-this-TU and hasn't escaped. Globals that have escaped
3146 are explicitly tracked, so we shouldn't hit this case for them. */
af66094d
DM
3147 if (m_store.called_unknown_fn_p ()
3148 && TREE_PUBLIC (decl)
3149 && !TREE_READONLY (decl))
623bc027
DM
3150 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
3151
3152 /* If we are on a path from the entrypoint from "main" and we have a
3153 global decl defined in this TU that hasn't been touched yet, then
3154 the initial value of REG can be taken from the initialization value
3155 of the decl. */
16ad9ae8 3156 if (called_from_main_p () || TREE_READONLY (decl))
623bc027 3157 {
61a43de5
DM
3158 /* Attempt to get the initializer value for base_reg. */
3159 if (const svalue *base_reg_init
3160 = base_reg->get_svalue_for_initializer (m_mgr))
623bc027 3161 {
61a43de5
DM
3162 if (reg == base_reg)
3163 return base_reg_init;
3164 else
623bc027 3165 {
61a43de5
DM
3166 /* Get the value for REG within base_reg_init. */
3167 binding_cluster c (base_reg);
e61ffa20 3168 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init);
61a43de5
DM
3169 const svalue *sval
3170 = c.get_any_binding (m_mgr->get_store_manager (), reg);
3171 if (sval)
3172 {
3173 if (reg->get_type ())
3174 sval = m_mgr->get_or_create_cast (reg->get_type (),
3175 sval);
3176 return sval;
3177 }
623bc027
DM
3178 }
3179 }
3180 }
3181
3182 /* Otherwise, return INIT_VAL(REG). */
3183 return m_mgr->get_or_create_initial_value (reg);
3184}
3185
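/* For example (hypothetical):

       static int counter = 42;

   a read of "counter" before anything has touched it yields 42 on
   paths rooted at "main" (and always for read-only decls); otherwise
   it yields INIT_VAL(counter), since arbitrary code may already have
   run before the function being analyzed.  */
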
808f4dfe 3186/* Get a value for REG, looking it up in the store, or otherwise falling
9faf8348
DM
3187 back to "initial" or "unknown" values.
3188 Use CTXT to report any warnings associated with reading from REG. */
757bf1df 3189
808f4dfe 3190const svalue *
9faf8348
DM
3191region_model::get_store_value (const region *reg,
3192 region_model_context *ctxt) const
757bf1df 3193{
9faf8348
DM
3194 check_region_for_read (reg, ctxt);
3195
2867118d
DM
3196 /* Special-case: handle var_decls in the constant pool. */
3197 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
3198 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
3199 return sval;
3200
808f4dfe
DM
3201 const svalue *sval
3202 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
3203 if (sval)
757bf1df 3204 {
808f4dfe
DM
3205 if (reg->get_type ())
3206 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
3207 return sval;
757bf1df 3208 }
757bf1df 3209
808f4dfe
DM
3210 /* Special-case: read at a constant index within a STRING_CST. */
3211 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
3212 if (tree byte_offset_cst
3213 = offset_reg->get_byte_offset ()->maybe_get_constant ())
3214 if (const string_region *str_reg
3215 = reg->get_parent_region ()->dyn_cast_string_region ())
757bf1df 3216 {
808f4dfe
DM
3217 tree string_cst = str_reg->get_string_cst ();
3218 if (const svalue *char_sval
3219 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3220 byte_offset_cst))
3221 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
757bf1df 3222 }
757bf1df 3223
808f4dfe
DM
3224 /* Special-case: read the initial char of a STRING_CST. */
3225 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
3226 if (const string_region *str_reg
3227 = cast_reg->get_original_region ()->dyn_cast_string_region ())
3228 {
3229 tree string_cst = str_reg->get_string_cst ();
3230 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
3231 if (const svalue *char_sval
3232 = m_mgr->maybe_get_char_from_string_cst (string_cst,
3233 byte_offset_cst))
3234 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
3235 }
757bf1df 3236
808f4dfe
DM
3237 /* Otherwise we implicitly have the initial value of the region
3238 (if the cluster had been touched, binding_cluster::get_any_binding,
3239 would have returned UNKNOWN, and we would already have returned
3240 that above). */
757bf1df 3241
623bc027
DM
3242 /* Handle globals. */
3243 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
3244 == RK_GLOBALS)
3245 return get_initial_value_for_global (reg);
757bf1df 3246
808f4dfe 3247 return m_mgr->get_or_create_initial_value (reg);
757bf1df
DM
3248}
3249
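/* For example (hypothetical):

       const char *s = "hi";
       char c = s[1];

   the load is at a constant offset within a string_region, so the
   STRING_CST special-case above resolves "c" to the constant 'i'.  */
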
808f4dfe
DM
3250/* Return false if REG does not exist, true if it may do.
3251 This is for detecting regions within the stack that don't exist anymore
3252 after frames are popped. */
757bf1df 3253
808f4dfe
DM
3254bool
3255region_model::region_exists_p (const region *reg) const
757bf1df 3256{
808f4dfe
DM
3257 /* If within a stack frame, check that the stack frame is live. */
3258 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 3259 {
808f4dfe
DM
3260 /* Check that the current frame is the enclosing frame, or is called
3261 by it. */
3262 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
3263 iter_frame = iter_frame->get_calling_frame ())
3264 if (iter_frame == enclosing_frame)
3265 return true;
3266 return false;
757bf1df 3267 }
808f4dfe
DM
3268
3269 return true;
757bf1df
DM
3270}
3271
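/* For example (hypothetical):

       int *f (void) { int x; return &x; }

   once f's frame has been popped, that frame is no longer on the
   current chain, so region_exists_p returns false for the region of
   "x", supporting diagnostics about stale stack pointers.  */
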
808f4dfe
DM
3272/* Get a region for referencing PTR_SVAL, creating a region if need be, and
3273 potentially generating warnings via CTXT.
35e3f082 3274 PTR_SVAL must be of pointer type.
808f4dfe 3275 PTR_TREE if non-NULL can be used when emitting diagnostics. */
757bf1df 3276
808f4dfe
DM
3277const region *
3278region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
53cb324c 3279 region_model_context *ctxt) const
757bf1df 3280{
808f4dfe 3281 gcc_assert (ptr_sval);
35e3f082 3282 gcc_assert (POINTER_TYPE_P (ptr_sval->get_type ()));
757bf1df 3283
49bfbf18
DM
3284 /* If we're dereferencing PTR_SVAL, assume that it is non-NULL; add this
3285 as a constraint. This suppresses false positives from
3286 -Wanalyzer-null-dereference for the case where we later see
3287 "if (PTR_SVAL)": without the constraint we would also consider the
3288 false branch and transition the malloc state machine from start->null. */
3289 tree null_ptr_cst = build_int_cst (ptr_sval->get_type (), 0);
3290 const svalue *null_ptr = m_mgr->get_or_create_constant_svalue (null_ptr_cst);
3291 m_constraints->add_constraint (ptr_sval, NE_EXPR, null_ptr);
3292
808f4dfe 3293 switch (ptr_sval->get_kind ())
757bf1df 3294 {
808f4dfe 3295 default:
23ebfda0 3296 break;
808f4dfe 3297
757bf1df
DM
3298 case SK_REGION:
3299 {
808f4dfe
DM
3300 const region_svalue *region_sval
3301 = as_a <const region_svalue *> (ptr_sval);
757bf1df
DM
3302 return region_sval->get_pointee ();
3303 }
3304
808f4dfe
DM
3305 case SK_BINOP:
3306 {
3307 const binop_svalue *binop_sval
3308 = as_a <const binop_svalue *> (ptr_sval);
3309 switch (binop_sval->get_op ())
3310 {
3311 case POINTER_PLUS_EXPR:
3312 {
3313 /* If we have a symbolic value expressing pointer arithmetic,
3314 try to convert it to a suitable region. */
3315 const region *parent_region
3316 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
3317 const svalue *offset = binop_sval->get_arg1 ();
3318 tree type = TREE_TYPE (ptr_sval->get_type ());
3319 return m_mgr->get_offset_region (parent_region, type, offset);
3320 }
3321 default:
23ebfda0 3322 break;
808f4dfe
DM
3323 }
3324 }
23ebfda0 3325 break;
757bf1df
DM
3326
3327 case SK_POISONED:
3328 {
3329 if (ctxt)
808f4dfe
DM
3330 {
3331 tree ptr = get_representative_tree (ptr_sval);
3332 /* If we can't get a representative tree for PTR_SVAL
3333 (e.g. if it hasn't been bound into the store), then
3334 fall back on PTR_TREE, if non-NULL. */
3335 if (!ptr)
3336 ptr = ptr_tree;
3337 if (ptr)
3338 {
3339 const poisoned_svalue *poisoned_sval
3340 = as_a <const poisoned_svalue *> (ptr_sval);
3341 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
00e7d024 3342 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind, NULL));
808f4dfe
DM
3343 }
3344 }
757bf1df 3345 }
23ebfda0 3346 break;
757bf1df
DM
3347 }
3348
23ebfda0 3349 return m_mgr->get_symbolic_region (ptr_sval);
757bf1df
DM
3350}
3351
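/* For example (hypothetical): dereferencing "p + i" where "p" points
   to "arr" hits the SK_BINOP/POINTER_PLUS_EXPR case above and yields
   an offset_region within "arr" rather than an opaque symbolic region,
   so later bounds checks still see the underlying base region.  */
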
d3b1ef7a
DM
3352/* Attempt to get BITS within any value of REG, as TYPE.
3353 In particular, extract values from compound_svalues for the case
3354 where there's a concrete binding at BITS.
9faf8348
DM
3355 Return an unknown svalue if we can't handle the given case.
3356 Use CTXT to report any warnings associated with reading from REG. */
d3b1ef7a
DM
3357
3358const svalue *
3359region_model::get_rvalue_for_bits (tree type,
3360 const region *reg,
9faf8348
DM
3361 const bit_range &bits,
3362 region_model_context *ctxt) const
d3b1ef7a 3363{
9faf8348 3364 const svalue *sval = get_store_value (reg, ctxt);
e61ffa20 3365 return m_mgr->get_or_create_bits_within (type, bits, sval);
d3b1ef7a
DM
3366}
3367
/* A subclass of pending_diagnostic for complaining about writes to
   constant regions of memory.  */

class write_to_const_diagnostic
: public pending_diagnostic_subclass<write_to_const_diagnostic>
{
public:
  write_to_const_diagnostic (const region *reg, tree decl)
  : m_reg (reg), m_decl (decl)
  {}

  const char *get_kind () const final override
  {
    return "write_to_const_diagnostic";
  }

  bool operator== (const write_to_const_diagnostic &other) const
  {
    return (m_reg == other.m_reg
	    && m_decl == other.m_decl);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_const;
  }

  bool emit (rich_location *rich_loc) final override
  {
    auto_diagnostic_group d;
    bool warned;
    switch (m_reg->get_kind ())
      {
      default:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to %<const%> object %qE", m_decl);
	break;
      case RK_FUNCTION:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to function %qE", m_decl);
	break;
      case RK_LABEL:
	warned = warning_at (rich_loc, get_controlling_option (),
			     "write to label %qE", m_decl);
	break;
      }
    if (warned)
      inform (DECL_SOURCE_LOCATION (m_decl), "declared here");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    switch (m_reg->get_kind ())
      {
      default:
	return ev.formatted_print ("write to %<const%> object %qE here",
				   m_decl);
      case RK_FUNCTION:
	return ev.formatted_print ("write to function %qE here", m_decl);
      case RK_LABEL:
	return ev.formatted_print ("write to label %qE here", m_decl);
      }
  }

private:
  const region *m_reg;
  tree m_decl;
};

/* A subclass of pending_diagnostic for complaining about writes to
   string literals.  */

class write_to_string_literal_diagnostic
: public pending_diagnostic_subclass<write_to_string_literal_diagnostic>
{
public:
  write_to_string_literal_diagnostic (const region *reg)
  : m_reg (reg)
  {}

  const char *get_kind () const final override
  {
    return "write_to_string_literal_diagnostic";
  }

  bool operator== (const write_to_string_literal_diagnostic &other) const
  {
    return m_reg == other.m_reg;
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_write_to_string_literal;
  }

  bool emit (rich_location *rich_loc) final override
  {
    return warning_at (rich_loc, get_controlling_option (),
		       "write to string literal");
    /* Ideally we would show the location of the STRING_CST as well,
       but it is not available at this point.  */
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    return ev.formatted_print ("write to string literal here");
  }

private:
  const region *m_reg;
};

/* Use CTXT to warn if DEST_REG is a region that shouldn't be written to.  */

void
region_model::check_for_writable_region (const region* dest_reg,
					 region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  const region *base_reg = dest_reg->get_base_region ();
  switch (base_reg->get_kind ())
    {
    default:
      break;
    case RK_FUNCTION:
      {
	const function_region *func_reg
	  = as_a <const function_region *> (base_reg);
	tree fndecl = func_reg->get_fndecl ();
	ctxt->warn (new write_to_const_diagnostic (func_reg, fndecl));
      }
      break;
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (base_reg);
	tree label = label_reg->get_label ();
	ctxt->warn (new write_to_const_diagnostic (label_reg, label));
      }
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (base_reg);
	tree decl = decl_reg->get_decl ();
	/* Warn about writes to const globals.
	   Don't warn for writes to const locals, and params in particular,
	   since we would warn in push_frame when setting them up (e.g. the
	   "this" param is "T* const").  */
	if (TREE_READONLY (decl)
	    && is_global_var (decl))
	  ctxt->warn (new write_to_const_diagnostic (dest_reg, decl));
      }
      break;
    case RK_STRING:
      ctxt->warn (new write_to_string_literal_diagnostic (dest_reg));
      break;
    }
}

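/* As an illustration (user-level sketch, not part of the analyzer itself),
   code along these lines would trigger the diagnostics above:

     const int g = 42;
     void f (void) { *(int *)&g = 1; }      // -Wanalyzer-write-to-const

     void h (void)
     {
       char *s = (char *)"immutable";
       s[0] = 'X';                          // -Wanalyzer-write-to-string-literal
     }
*/
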
/* Get the capacity of REG in bytes.  */

const svalue *
region_model::get_capacity (const region *reg) const
{
  switch (reg->get_kind ())
    {
    default:
      break;
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	tree decl = decl_reg->get_decl ();
	if (TREE_CODE (decl) == SSA_NAME)
	  {
	    tree type = TREE_TYPE (decl);
	    tree size = TYPE_SIZE (type);
	    return get_rvalue (size, NULL);
	  }
	else
	  {
	    tree size = decl_init_size (decl, false);
	    if (size)
	      return get_rvalue (size, NULL);
	  }
      }
      break;
    case RK_SIZED:
      /* Look through sized regions to get at the capacity
	 of the underlying regions.  */
      return get_capacity (reg->get_parent_region ());
    }

  if (const svalue *recorded = get_dynamic_extents (reg))
    return recorded;

  return m_mgr->get_or_create_unknown_svalue (sizetype);
}

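/* For example (sketch): for a decl_region of "char buf[64]", decl_init_size
   yields a capacity of 64 bytes, whereas for a heap-allocated region the
   capacity comes from the dynamic extents recorded at the allocation site,
   e.g. the argument that was passed to malloc.  */
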
/* Return the string size, including the 0-terminator, if SVAL is a
   constant_svalue holding a string.  Otherwise, return an unknown_svalue.  */

const svalue *
region_model::get_string_size (const svalue *sval) const
{
  tree cst = sval->maybe_get_constant ();
  if (!cst || TREE_CODE (cst) != STRING_CST)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}

/* Return the string size, including the 0-terminator, if REG is a
   string_region.  Otherwise, return an unknown_svalue.  */

const svalue *
region_model::get_string_size (const region *reg) const
{
  const string_region *str_reg = dyn_cast <const string_region *> (reg);
  if (!str_reg)
    return m_mgr->get_or_create_unknown_svalue (size_type_node);

  tree cst = str_reg->get_string_cst ();
  tree out = build_int_cst (size_type_node, TREE_STRING_LENGTH (cst));
  return m_mgr->get_or_create_constant_svalue (out);
}

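/* For example, for the STRING_CST "foo", TREE_STRING_LENGTH is 4 (three
   characters plus the terminating NUL), so both overloads above yield a
   constant_svalue of 4.  */
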
/* If CTXT is non-NULL, use it to warn about any problems accessing REG,
   using DIR to determine if this access is a read or write.  */

void
region_model::check_region_access (const region *reg,
				   enum access_direction dir,
				   region_model_context *ctxt) const
{
  /* Fail gracefully if CTXT is NULL.  */
  if (!ctxt)
    return;

  check_region_for_taint (reg, dir, ctxt);
  check_region_bounds (reg, dir, ctxt);

  switch (dir)
    {
    default:
      gcc_unreachable ();
    case DIR_READ:
      /* Currently a no-op.  */
      break;
    case DIR_WRITE:
      check_for_writable_region (reg, ctxt);
      break;
    }
}

/* If CTXT is non-NULL, use it to warn about any problems writing to REG.  */

void
region_model::check_region_for_write (const region *dest_reg,
				      region_model_context *ctxt) const
{
  check_region_access (dest_reg, DIR_WRITE, ctxt);
}

/* If CTXT is non-NULL, use it to warn about any problems reading from REG.  */

void
region_model::check_region_for_read (const region *src_reg,
				     region_model_context *ctxt) const
{
  check_region_access (src_reg, DIR_READ, ctxt);
}

/* Concrete subclass for casts of pointers that lead to trailing bytes.  */

class dubious_allocation_size
: public pending_diagnostic_subclass<dubious_allocation_size>
{
public:
  dubious_allocation_size (const region *lhs, const region *rhs)
  : m_lhs (lhs), m_rhs (rhs), m_expr (NULL_TREE), m_allocation_event (NULL)
  {}

  dubious_allocation_size (const region *lhs, const region *rhs,
			   tree expr)
  : m_lhs (lhs), m_rhs (rhs), m_expr (expr), m_allocation_event (NULL)
  {}

  const char *get_kind () const final override
  {
    return "dubious_allocation_size";
  }

  bool operator== (const dubious_allocation_size &other) const
  {
    return m_lhs == other.m_lhs && m_rhs == other.m_rhs
	   && pending_diagnostic::same_tree_p (m_expr, other.m_expr);
  }

  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_allocation_size;
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    m.add_cwe (131);

    return warning_meta (rich_loc, m, get_controlling_option (),
			 "allocated buffer size is not a multiple"
			 " of the pointee's size");
  }

  label_text
  describe_region_creation_event (const evdesc::region_creation &ev) final
  override
  {
    m_allocation_event = &ev;
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes here", m_expr);
	else
	  return ev.formatted_print ("allocated %qE bytes here", m_expr);
      }

    return ev.formatted_print ("allocated here");
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    tree pointee_type = TREE_TYPE (m_lhs->get_type ());
    if (m_allocation_event)
      return ev.formatted_print ("assigned to %qT here;"
				 " %<sizeof (%T)%> is %qE",
				 m_lhs->get_type (), pointee_type,
				 size_in_bytes (pointee_type));

    /* Fallback: typically we should have seen an allocation event and
       returned above; otherwise, describe the allocation here as well.  */
    if (m_expr)
      {
	if (TREE_CODE (m_expr) == INTEGER_CST)
	  return ev.formatted_print ("allocated %E bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
	else
	  return ev.formatted_print ("allocated %qE bytes and assigned to"
				     " %qT here; %<sizeof (%T)%> is %qE",
				     m_expr, m_lhs->get_type (), pointee_type,
				     size_in_bytes (pointee_type));
      }

    return ev.formatted_print ("allocated and assigned to %qT here;"
			       " %<sizeof (%T)%> is %qE",
			       m_lhs->get_type (), pointee_type,
			       size_in_bytes (pointee_type));
  }

  void mark_interesting_stuff (interesting_t *interest) final override
  {
    interest->add_region_creation (m_rhs);
  }

private:
  const region *m_lhs;
  const region *m_rhs;
  const tree m_expr;
  const evdesc::region_creation *m_allocation_event;
};

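/* Illustrative trigger for the diagnostic above (a sketch of user code,
   assuming a 4-byte int; not part of the analyzer):

     #include <stdlib.h>
     int *test (void)
     {
       return (int *) malloc (11);   // 11 is not a multiple of sizeof (int)
     }
*/
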
/* Return true if the constant allocation size CST is compatible with a
   pointee of size POINTEE_SIZE_TREE; return false for dubious allocation
   sizes.  */

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree,
			       bool is_struct)
{
  gcc_assert (TREE_CODE (cst) == INTEGER_CST);
  gcc_assert (TREE_CODE (pointee_size_tree) == INTEGER_CST);

  unsigned HOST_WIDE_INT pointee_size = TREE_INT_CST_LOW (pointee_size_tree);
  unsigned HOST_WIDE_INT alloc_size = TREE_INT_CST_LOW (cst);

  if (is_struct)
    return alloc_size == 0 || alloc_size >= pointee_size;
  return alloc_size % pointee_size == 0;
}

static bool
capacity_compatible_with_type (tree cst, tree pointee_size_tree)
{
  return capacity_compatible_with_type (cst, pointee_size_tree, false);
}

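/* Worked example for the helper above: with a 4-byte pointee,
   capacity_compatible_with_type (16, 4, false) is true (16 % 4 == 0), while
   capacity_compatible_with_type (18, 4, false) is false; in the is_struct
   case, an allocation of 0 bytes, or of at least the struct's size, is
   accepted.  */
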
/* Checks whether SVAL could be a multiple of SIZE_CST.

   It works by visiting all svalues inside SVAL until it reaches
   atomic nodes.  From those, it goes back up again and adds each
   node that might be a multiple of SIZE_CST to the RESULT_SET.  */

class size_visitor : public visitor
{
public:
  size_visitor (tree size_cst, const svalue *root_sval, constraint_manager *cm)
  : m_size_cst (size_cst), m_root_sval (root_sval), m_cm (cm)
  {
    m_root_sval->accept (this);
  }

  bool get_result ()
  {
    return result_set.contains (m_root_sval);
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    check_constant (sval->get_constant (), sval);
  }

  void visit_unknown_svalue (const unknown_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_poisoned_svalue (const poisoned_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_unaryop_svalue (const unaryop_svalue *sval) final override
  {
    const svalue *arg = sval->get_arg ();
    if (result_set.contains (arg))
      result_set.add (sval);
  }

  void visit_binop_svalue (const binop_svalue *sval) final override
  {
    const svalue *arg0 = sval->get_arg0 ();
    const svalue *arg1 = sval->get_arg1 ();

    if (sval->get_op () == MULT_EXPR)
      {
	if (result_set.contains (arg0) || result_set.contains (arg1))
	  result_set.add (sval);
      }
    else
      {
	if (result_set.contains (arg0) && result_set.contains (arg1))
	  result_set.add (sval);
      }
  }

  void visit_repeated_svalue (const repeated_svalue *sval) final override
  {
    sval->get_inner_svalue ()->accept (this);
    if (result_set.contains (sval->get_inner_svalue ()))
      result_set.add (sval);
  }

  void visit_unmergeable_svalue (const unmergeable_svalue *sval) final override
  {
    sval->get_arg ()->accept (this);
    if (result_set.contains (sval->get_arg ()))
      result_set.add (sval);
  }

  void visit_widening_svalue (const widening_svalue *sval) final override
  {
    const svalue *base = sval->get_base_svalue ();
    const svalue *iter = sval->get_iter_svalue ();

    if (result_set.contains (base) && result_set.contains (iter))
      result_set.add (sval);
  }

  void visit_conjured_svalue (const conjured_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    equiv_class_id id (-1);
    if (m_cm->get_equiv_class_by_svalue (sval, &id))
      {
	if (tree cst = id.get_obj (*m_cm).get_any_constant ())
	  check_constant (cst, sval);
	else
	  result_set.add (sval);
      }
  }

  void visit_asm_output_svalue (const asm_output_svalue *sval ATTRIBUTE_UNUSED)
    final override
  {
    result_set.add (sval);
  }

  void visit_const_fn_result_svalue (const const_fn_result_svalue
				      *sval ATTRIBUTE_UNUSED) final override
  {
    result_set.add (sval);
  }

private:
  void check_constant (tree cst, const svalue *sval)
  {
    switch (TREE_CODE (cst))
      {
      default:
	/* Assume all unhandled operands are compatible.  */
	result_set.add (sval);
	break;
      case INTEGER_CST:
	if (capacity_compatible_with_type (cst, m_size_cst))
	  result_set.add (sval);
	break;
      }
  }

  tree m_size_cst;
  const svalue *m_root_sval;
  constraint_manager *m_cm;
  svalue_set result_set; /* Used as a mapping of svalue*->bool.  */
};

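/* For instance (a sketch of the visitor's behavior): with SIZE_CST == 4,
   the symbolic capacity "n * 4" is accepted, since the MULT_EXPR has a
   constant operand that is a multiple of 4, whereas "n * 4 + 2" is
   rejected: for the PLUS_EXPR both operands must be potential multiples,
   and the constant 2 fails check_constant.  */
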
/* Return true if a struct or union either uses the inheritance pattern,
   where the first field is a base struct, or the flexible array member
   pattern, where the last field is an array without a specified size.  */

static bool
struct_or_union_with_inheritance_p (tree struc)
{
  tree iter = TYPE_FIELDS (struc);
  if (iter == NULL_TREE)
    return false;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (iter)))
    return true;

  tree last_field;
  while (iter != NULL_TREE)
    {
      last_field = iter;
      iter = DECL_CHAIN (iter);
    }

  if (last_field != NULL_TREE
      && TREE_CODE (TREE_TYPE (last_field)) == ARRAY_TYPE)
    return true;

  return false;
}

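/* Illustrative examples (not from this file): both of these make the
   predicate above return true, the first via the base-struct-as-first-field
   pattern, the second via the flexible array member pattern:

     struct derived { struct base b; int extra; };
     struct str { size_t len; char data[]; };
*/
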
/* Return true if the lhs and rhs of an assignment have different types.  */

static bool
is_any_cast_p (const gimple *stmt)
{
  if (const gassign *assign = dyn_cast <const gassign *> (stmt))
    return gimple_assign_cast_p (assign)
	   || !pending_diagnostic::same_tree_p (
		  TREE_TYPE (gimple_assign_lhs (assign)),
		  TREE_TYPE (gimple_assign_rhs1 (assign)));
  else if (const gcall *call = dyn_cast <const gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call);
      return lhs != NULL_TREE && !pending_diagnostic::same_tree_p (
				    TREE_TYPE (gimple_call_lhs (call)),
				    gimple_call_return_type (call));
    }

  return false;
}

/* On pointer assignments, check whether the buffer size of
   RHS_SVAL is compatible with the type of the LHS_REG.
   Use a non-null CTXT to report allocation size warnings.  */

void
region_model::check_region_size (const region *lhs_reg, const svalue *rhs_sval,
				 region_model_context *ctxt) const
{
  if (!ctxt || ctxt->get_stmt () == NULL)
    return;
  /* Only report warnings on assignments that actually change the type.  */
  if (!is_any_cast_p (ctxt->get_stmt ()))
    return;

  const region_svalue *reg_sval = dyn_cast <const region_svalue *> (rhs_sval);
  if (!reg_sval)
    return;

  tree pointer_type = lhs_reg->get_type ();
  if (pointer_type == NULL_TREE || !POINTER_TYPE_P (pointer_type))
    return;

  tree pointee_type = TREE_TYPE (pointer_type);
  /* Make sure that the type on the left-hand side actually has a size.  */
  if (pointee_type == NULL_TREE || VOID_TYPE_P (pointee_type)
      || TYPE_SIZE_UNIT (pointee_type) == NULL_TREE)
    return;

  /* Bail out early on pointers to structs where we cannot
     deduce whether the buffer size is compatible.  */
  bool is_struct = RECORD_OR_UNION_TYPE_P (pointee_type);
  if (is_struct && struct_or_union_with_inheritance_p (pointee_type))
    return;

  tree pointee_size_tree = size_in_bytes (pointee_type);
  /* We give up if the type size is not known at compile-time or the
     type size is always compatible regardless of the buffer size.  */
  if (TREE_CODE (pointee_size_tree) != INTEGER_CST
      || integer_zerop (pointee_size_tree)
      || integer_onep (pointee_size_tree))
    return;

  const region *rhs_reg = reg_sval->get_pointee ();
  const svalue *capacity = get_capacity (rhs_reg);
  switch (capacity->get_kind ())
    {
    case svalue_kind::SK_CONSTANT:
      {
	const constant_svalue *cst_cap_sval
	  = as_a <const constant_svalue *> (capacity);
	tree cst_cap = cst_cap_sval->get_constant ();
	if (TREE_CODE (cst_cap) == INTEGER_CST
	    && !capacity_compatible_with_type (cst_cap, pointee_size_tree,
					       is_struct))
	  ctxt->warn (new dubious_allocation_size (lhs_reg, rhs_reg,
						   cst_cap));
      }
      break;
    default:
      {
	if (!is_struct)
	  {
	    size_visitor v (pointee_size_tree, capacity, m_constraints);
	    if (!v.get_result ())
	      {
		tree expr = get_representative_tree (capacity);
		ctxt->warn (new dubious_allocation_size (lhs_reg, rhs_reg,
							 expr));
	      }
	  }
	break;
      }
    }
}

/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   Use CTXT to report any warnings associated with writing to LHS_REG.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context *ctxt)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  check_region_size (lhs_reg, rhs_sval, ctxt);

  check_region_for_write (lhs_reg, ctxt);

  m_store.set_value (m_mgr->get_store_manager (), lhs_reg, rhs_sval,
		     ctxt ? ctxt->get_uncertainty () : NULL);
}

/* Set the value of the region given by LHS to the value given by RHS.  */

void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}

/* Remove all bindings overlapping REG within the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager (), reg);
}

/* Remove any bindings for REG within the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager (), reg);
}

/* Fill REG with SVAL.  */

void
region_model::fill_region (const region *reg, const svalue *sval)
{
  m_store.fill_region (m_mgr->get_store_manager (), reg, sval);
}

/* Zero-fill REG.  */

void
region_model::zero_fill_region (const region *reg)
{
  m_store.zero_fill_region (m_mgr->get_store_manager (), reg);
}

/* Mark REG as having unknown content.  */

void
region_model::mark_region_as_unknown (const region *reg,
				      uncertainty_t *uncertainty)
{
  m_store.mark_region_as_unknown (m_mgr->get_store_manager (), reg,
				  uncertainty);
}

/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  tristate ts = eval_condition_without_cm (lhs, op, rhs);
  if (ts.is_known ())
    return ts;

  /* Otherwise, try constraints.  */
  return m_constraints->eval_condition (lhs, op, rhs);
}

/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model, without resorting to the constraint_manager.

   This is exposed so that impl_region_model_context::on_state_leak can
   check for equality part-way through region_model::purge_unused_svalues
   without risking creating new ECs.  */

tristate
region_model::eval_condition_without_cm (const svalue *lhs,
					 enum tree_code op,
					 const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* See what we know based on the values.  */

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  /* If we have a pair of constants, compare them.  */
  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);

  /* Handle comparison against zero.  */
  if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
    if (zerop (cst_rhs->get_constant ()))
      {
	if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
	  {
	    /* A region_svalue is a non-NULL pointer, except in certain
	       special cases (see the comment for region::non_null_p).  */
	    const region *pointee = ptr->get_pointee ();
	    if (pointee->non_null_p ())
	      {
		switch (op)
		  {
		  default:
		    gcc_unreachable ();

		  case EQ_EXPR:
		  case GE_EXPR:
		  case LE_EXPR:
		    return tristate::TS_FALSE;

		  case NE_EXPR:
		  case GT_EXPR:
		  case LT_EXPR:
		    return tristate::TS_TRUE;
		  }
	      }
	  }
	else if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
	  {
	    /* Treat offsets from a non-NULL pointer as being non-NULL.  This
	       isn't strictly true, in that eventually ptr++ will wrap
	       around and be NULL, but it won't occur in practice and thus
	       can be used to suppress effectively false positives that we
	       shouldn't warn for.  */
	    if (binop->get_op () == POINTER_PLUS_EXPR)
	      {
		tristate lhs_ts
		  = eval_condition_without_cm (binop->get_arg0 (),
					       op, rhs);
		if (lhs_ts.is_known ())
		  return lhs_ts;
	      }
	  }
      }

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  /* Handle comparisons between two svalues with more than one operand.  */
  if (const binop_svalue *binop = lhs->dyn_cast_binop_svalue ())
    {
      switch (op)
	{
	default:
	  break;
	case EQ_EXPR:
	  {
	    /* TODO: binops can be equal even if they are not structurally
	       equal in case of commutative operators.  */
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case LE_EXPR:
	  {
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GE_EXPR:
	  {
	    tristate res = structural_equality (lhs, rhs);
	    if (res.is_true ())
	      return res;
	    res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	case GT_EXPR:
	  {
	    tristate res = symbolic_greater_than (binop, rhs);
	    if (res.is_true ())
	      return res;
	  }
	  break;
	}
    }

  return tristate::TS_UNKNOWN;
}

/* Subroutine of region_model::eval_condition_without_cm, for rejecting
   equality of INIT_VAL(PARM) with &LOCAL.  */

tristate
region_model::compare_initial_and_pointer (const initial_svalue *init,
					   const region_svalue *ptr) const
{
  const region *pointee = ptr->get_pointee ();

  /* If we have a pointer to something within a stack frame, it can't be the
     initial value of a param.  */
  if (pointee->maybe_get_frame_region ())
    if (init->initial_value_of_param_p ())
      return tristate::TS_FALSE;

  return tristate::TS_UNKNOWN;
}

/* Return true if SVAL is definitely positive.  */

static bool
is_positive_svalue (const svalue *sval)
{
  if (tree cst = sval->maybe_get_constant ())
    return !zerop (cst) && get_range_pos_neg (cst) == 1;
  tree type = sval->get_type ();
  if (!type)
    return false;
  /* Consider a binary operation size_t + int.  The analyzer wraps the int
     in an unaryop_svalue, converting it to a size_t, but in the dynamic
     execution the result can be smaller than the first operand.  Thus, we
     also have to check whether the argument of the unaryop_svalue is
     positive.  */
  if (const unaryop_svalue *un_op = dyn_cast <const unaryop_svalue *> (sval))
    return CONVERT_EXPR_CODE_P (un_op->get_op ()) && TYPE_UNSIGNED (type)
	   && is_positive_svalue (un_op->get_arg ());
  return TYPE_UNSIGNED (type);
}

/* Return true if A is definitely larger than B.

   Limitation: does not account for integer overflows and does not try to
   return false, so it cannot be used negated.  */

tristate
region_model::symbolic_greater_than (const binop_svalue *bin_a,
				     const svalue *b) const
{
  if (bin_a->get_op () == PLUS_EXPR || bin_a->get_op () == MULT_EXPR)
    {
      /* Eliminate the right-hand side of both svalues.  */
      if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	if (bin_a->get_op () == bin_b->get_op ()
	    && eval_condition_without_cm (bin_a->get_arg1 (),
					  GT_EXPR,
					  bin_b->get_arg1 ()).is_true ()
	    && eval_condition_without_cm (bin_a->get_arg0 (),
					  GE_EXPR,
					  bin_b->get_arg0 ()).is_true ())
	  return tristate (tristate::TS_TRUE);

      /* Otherwise, try to remove a positive offset or factor from BIN_A.  */
      if (is_positive_svalue (bin_a->get_arg1 ())
	  && eval_condition_without_cm (bin_a->get_arg0 (),
					GE_EXPR, b).is_true ())
	return tristate (tristate::TS_TRUE);
    }
  return tristate::unknown ();
}

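/* For example (a sketch of the two rules above): "x + 4 > x + 2" follows
   from the first rule (4 > 2 and x >= x), and "x + 4 > x" from the second,
   since the offset 4 is positive; neither rule accounts for wrap-around,
   which is why this helper must not be used negated.  */
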
/* Return true if A and B are equal structurally.

   Structural equality means that A and B are equal if the svalues A and B
   have the same nodes at the same positions in the tree and the leaves are
   equal.  Equality for conjured_svalues and initial_svalues is determined by
   comparing the pointers, while constants are compared by value.  That
   behavior is useful to check for binop_svalues that evaluate to the same
   concrete value but might use one operand with a different type but the
   same constant value.

   For example,
     binop_svalue (mult_expr,
		   initial_svalue ('size_t', decl_region (..., 'some_var')),
		   constant_svalue ('size_t', 4))
   and
     binop_svalue (mult_expr,
		   initial_svalue ('size_t', decl_region (..., 'some_var')),
		   constant_svalue ('sizetype', 4))
   are structurally equal.  A concrete C code example, where this occurs, can
   be found in test7 of out-of-bounds-5.c.  */

tristate
region_model::structural_equality (const svalue *a, const svalue *b) const
{
  /* If A and B are referentially equal, they are also structurally equal.  */
  if (a == b)
    return tristate (tristate::TS_TRUE);

  switch (a->get_kind ())
    {
    default:
      return tristate::unknown ();
    /* SK_CONJURED and SK_INITIAL are already handled
       by the referential equality above.  */
    case SK_CONSTANT:
      {
	tree a_cst = a->maybe_get_constant ();
	tree b_cst = b->maybe_get_constant ();
	if (a_cst && b_cst)
	  return tristate (tree_int_cst_equal (a_cst, b_cst));
      }
      return tristate (tristate::TS_FALSE);
    case SK_UNARYOP:
      {
	const unaryop_svalue *un_a = as_a <const unaryop_svalue *> (a);
	if (const unaryop_svalue *un_b = dyn_cast <const unaryop_svalue *> (b))
	  return tristate (pending_diagnostic::same_tree_p (un_a->get_type (),
							    un_b->get_type ())
			   && un_a->get_op () == un_b->get_op ()
			   && structural_equality (un_a->get_arg (),
						   un_b->get_arg ()));
      }
      return tristate (tristate::TS_FALSE);
    case SK_BINOP:
      {
	const binop_svalue *bin_a = as_a <const binop_svalue *> (a);
	if (const binop_svalue *bin_b = dyn_cast <const binop_svalue *> (b))
	  return tristate (bin_a->get_op () == bin_b->get_op ()
			   && structural_equality (bin_a->get_arg0 (),
						   bin_b->get_arg0 ())
			   && structural_equality (bin_a->get_arg1 (),
						   bin_b->get_arg1 ()));
      }
      return tristate (tristate::TS_FALSE);
    }
}

/* Handle various constraints of the form:
     LHS: ((bool)INNER_LHS INNER_OP INNER_RHS)
     OP : == or !=
     RHS: zero
   and (with a cast):
     LHS: CAST([long]int, ((bool)INNER_LHS INNER_OP INNER_RHS))
     OP : == or !=
     RHS: zero
   by adding constraints for INNER_LHS INNER_OP INNER_RHS.

   Return true if this function can fully handle the constraint; if
   so, add the implied constraint(s) and write true to *OUT if they
   are consistent with existing constraints, or write false to *OUT
   if they contradict existing constraints.

   Return false for cases that this function doesn't know how to handle.

   For example, if we're checking a stored conditional, we'll have
   something like:
     LHS: CAST(long int, (&HEAP_ALLOCATED_REGION(8)!=(int *)0B))
     OP : NE_EXPR
     RHS: zero
   which this function can turn into an add_constraint of:
     (&HEAP_ALLOCATED_REGION(8) != (int *)0B)

   Similarly, optimized && and || conditionals lead to e.g.
     if (p && q)
   becoming gimple like this:
     _1 = p_6 == 0B;
     _2 = q_8 == 0B
     _3 = _1 | _2
   On the "_3 is false" branch we can have constraints of the form:
     ((&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
      | (&HEAP_ALLOCATED_REGION(10)!=(int *)0B))
     == 0
   which implies that both _1 and _2 are false,
   which this function can turn into a pair of add_constraints of
     (&HEAP_ALLOCATED_REGION(8)!=(int *)0B)
   and:
     (&HEAP_ALLOCATED_REGION(10)!=(int *)0B).  */

bool
region_model::add_constraints_from_binop (const svalue *outer_lhs,
					  enum tree_code outer_op,
					  const svalue *outer_rhs,
					  bool *out,
					  region_model_context *ctxt)
{
  while (const svalue *cast = outer_lhs->maybe_undo_cast ())
    outer_lhs = cast;
  const binop_svalue *binop_sval = outer_lhs->dyn_cast_binop_svalue ();
  if (!binop_sval)
    return false;
  if (!outer_rhs->all_zeroes_p ())
    return false;

  const svalue *inner_lhs = binop_sval->get_arg0 ();
  enum tree_code inner_op = binop_sval->get_op ();
  const svalue *inner_rhs = binop_sval->get_arg1 ();

  if (outer_op != NE_EXPR && outer_op != EQ_EXPR)
    return false;

  /* We have either
     - "OUTER_LHS != false" (i.e. OUTER is true), or
     - "OUTER_LHS == false" (i.e. OUTER is false).  */
  bool is_true = outer_op == NE_EXPR;

  switch (inner_op)
    {
    default:
      return false;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "(inner_lhs OP inner_rhs) == 0"
	   then (inner_lhs OP inner_rhs) must have the same
	   logical value as LHS.  */
	if (!is_true)
	  inner_op = invert_tree_comparison (inner_op, false /* honor_nans */);
	*out = add_constraint (inner_lhs, inner_op, inner_rhs, ctxt);
	return true;
      }
      break;

    case BIT_AND_EXPR:
      if (is_true)
	{
	  /* ...and "(inner_lhs & inner_rhs) != 0"
	     then both inner_lhs and inner_rhs must be true.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, NE_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, NE_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;

    case BIT_IOR_EXPR:
      if (!is_true)
	{
	  /* ...and "(inner_lhs | inner_rhs) == 0"
	     i.e. "(inner_lhs | inner_rhs)" is false
	     then both inner_lhs and inner_rhs must be false.  */
	  const svalue *false_sval
	    = m_mgr->get_or_create_constant_svalue (boolean_false_node);
	  bool sat1 = add_constraint (inner_lhs, EQ_EXPR, false_sval, ctxt);
	  bool sat2 = add_constraint (inner_rhs, EQ_EXPR, false_sval, ctxt);
	  *out = sat1 && sat2;
	  return true;
	}
      return false;
    }
}

/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  return add_constraint (lhs_sval, op, rhs_sval, ctxt);
}

/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs,
			      region_model_context *ctxt)
{
  tristate t_cond = eval_condition (lhs, op, rhs);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  bool out;
  if (add_constraints_from_binop (lhs, op, rhs, &out, ctxt))
    return out;

  /* Attempt to store the constraint.  */
  if (!m_constraints->add_constraint (lhs, op, rhs))
    return false;

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  /* If we have &REGION == NULL, then drop dynamic extents for REGION (for
     the case where REGION is heap-allocated and thus could be NULL).  */
  if (tree rhs_cst = rhs->maybe_get_constant ())
    if (op == EQ_EXPR && zerop (rhs_cst))
      if (const region_svalue *region_sval = lhs->dyn_cast_region_svalue ())
	unset_dynamic_extents (region_sval->get_pointee ());

  return true;
}

/* As above, but when returning false, if OUT is non-NULL, write a
   new rejected_constraint to *OUT.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt,
			      rejected_constraint **out)
{
  bool sat = add_constraint (lhs, op, rhs, ctxt);
  if (!sat && out)
    *out = new rejected_op_constraint (*this, lhs, op, rhs);
  return sat;
}

/* Determine what is known about the condition "LHS OP RHS" within
   this model.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

tristate
region_model::eval_condition (tree lhs,
			      enum tree_code op,
			      tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to model constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return tristate::unknown ();

  return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
}

/* Implementation of region_model::get_representative_path_var.
   Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */

path_var
region_model::get_representative_path_var_1 (const svalue *sval,
					     svalue_set *visited) const
{
  gcc_assert (sval);

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    return path_var (NULL_TREE, 0);
  visited->add (sval);

  /* Handle casts by recursion into get_representative_path_var.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    {
      path_var result = get_representative_path_var (cast_sval, visited);
      tree orig_type = sval->get_type ();
      /* If necessary, wrap the result in a cast.  */
      if (result.m_tree && orig_type)
	result.m_tree = build1 (NOP_EXPR, orig_type, result.m_tree);
      return result;
    }

  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, &pvs);

  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited))
	return path_var (build1 (ADDR_EXPR,
				 sval->get_type (),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  /* Handle binops.  */
  if (const binop_svalue *binop_sval = sval->dyn_cast_binop_svalue ())
    if (path_var lhs_pv
	  = get_representative_path_var (binop_sval->get_arg0 (), visited))
      if (path_var rhs_pv
	    = get_representative_path_var (binop_sval->get_arg1 (), visited))
	return path_var (build2 (binop_sval->get_op (),
				 sval->get_type (),
				 lhs_pv.m_tree, rhs_pv.m_tree),
			 lhs_pv.m_stack_depth);

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  pvs.qsort (readability_comparator);
  return pvs[0];
}

/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited) const
{
  if (sval == NULL)
    return path_var (NULL_TREE, 0);

  tree orig_type = sval->get_type ();

  path_var result = get_representative_path_var_1 (sval, visited);

  /* Verify that the result has the same type as SVAL, if any.  */
  if (result.m_tree && orig_type)
    gcc_assert (TREE_TYPE (result.m_tree) == orig_type);

  return result;
}

/* Attempt to return a tree that represents SVAL, or return NULL_TREE.

   Strip off any top-level cast, to avoid messages like
     double-free of '(void *)ptr'
   from analyzer diagnostics.  */

tree
region_model::get_representative_tree (const svalue *sval) const
{
  svalue_set visited;
  tree expr = get_representative_path_var (sval, &visited).m_tree;

  /* Strip off any top-level cast.  */
  if (expr && TREE_CODE (expr) == NOP_EXPR)
    expr = TREE_OPERAND (expr, 0);

  return fixup_tree_for_diagnostic (expr);
}

tree
region_model::get_representative_tree (const region *reg) const
{
  svalue_set visited;
  tree expr = get_representative_path_var (reg, &visited).m_tree;

  /* Strip off any top-level cast.  */
  if (expr && TREE_CODE (expr) == NOP_EXPR)
    expr = TREE_OPERAND (expr, 0);

  return fixup_tree_for_diagnostic (expr);
}

/* Implementation of region_model::get_representative_path_var.

   Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var_1 (const region *reg,
					     svalue_set *visited) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      {
	const label_region *label_reg = as_a <const label_region *> (reg);
	return path_var (label_reg->get_label (), 0);
      }

    case RK_SYMBOLIC:
      {
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited);
	if (!offset_pv || TREE_CODE (offset_pv.m_tree) != INTEGER_CST)
	  return path_var (NULL_TREE, 0);
	tree addr_parent = build1 (ADDR_EXPR,
				   build_pointer_type (reg->get_type ()),
				   parent_pv.m_tree);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 addr_parent, offset_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_SIZED:
      return path_var (NULL_TREE, 0);

    case RK_CAST:
      {
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_VAR_ARG:
    case RK_UNKNOWN:
      return path_var (NULL_TREE, 0);
    }
}

/* Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.

   This function defers to get_representative_path_var_1 to do the work;
   it adds verification that get_representative_path_var_1 returned a tree
   of the correct type.  */

path_var
region_model::get_representative_path_var (const region *reg,
					   svalue_set *visited) const
{
  path_var result = get_representative_path_var_1 (reg, visited);

  /* Verify that the result has the same type as REG, if any.  */
  if (result.m_tree && reg->get_type ())
    gcc_assert (TREE_TYPE (result.m_tree) == reg->get_type ());

  return result;
}

/* Update this model for any phis in SNODE, assuming we came from
   LAST_CFG_SUPEREDGE.  */

void
region_model::update_for_phis (const supernode *snode,
			       const cfg_superedge *last_cfg_superedge,
			       region_model_context *ctxt)
{
  gcc_assert (last_cfg_superedge);

  /* Copy this state and pass it to handle_phi so that all of the phi stmts
     are effectively handled simultaneously.  */
  const region_model old_state (*this);

  for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();

      tree src = last_cfg_superedge->get_phi_arg (phi);
      tree lhs = gimple_phi_result (phi);

      /* Update next_state based on phi and old_state.  */
      handle_phi (phi, lhs, src, old_state, ctxt);
    }
}

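/* The snapshot matters for simultaneous phis, e.g. for gimple along the
   lines of (illustrative):
     x_3 = PHI <y_1>
     y_4 = PHI <x_2>
   Both phis must read y_1 and x_2 from OLD_STATE; evaluating them against a
   partially-updated state would lose one of the incoming values.  */
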
/* Attempt to update this model for taking EDGE (where the last statement
   was LAST_STMT), returning true if the edge can be taken, false
   otherwise.
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.

   For CFG superedges where LAST_STMT is a conditional or a switch
   statement, attempt to add the relevant conditions for EDGE to this
   model, returning true if they are feasible, or false if they are
   impossible.

   For call superedges, push frame information and store arguments
   into parameters.

   For return superedges, pop frame information and store return
   values into any lhs.

   Rejection of call/return superedges happens elsewhere, in
   program_point::on_edge (i.e. based on program point, rather
   than program state).  */

bool
region_model::maybe_update_for_edge (const superedge &edge,
				     const gimple *last_stmt,
				     region_model_context *ctxt,
				     rejected_constraint **out)
{
  /* Handle frame updates for interprocedural edges.  */
  switch (edge.m_kind)
    {
    default:
      break;

    case SUPEREDGE_CALL:
      {
	const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
	update_for_call_superedge (*call_edge, ctxt);
      }
      break;

    case SUPEREDGE_RETURN:
      {
	const return_superedge *return_edge
	  = as_a <const return_superedge *> (&edge);
	update_for_return_superedge (*return_edge, ctxt);
      }
      break;

    case SUPEREDGE_INTRAPROCEDURAL_CALL:
      {
	const callgraph_superedge *cg_sedge
	  = as_a <const callgraph_superedge *> (&edge);
	update_for_call_summary (*cg_sedge, ctxt);
      }
      break;
    }

  if (last_stmt == NULL)
    return true;

  /* Apply any constraints for conditionals/switch statements.  */

  if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
    {
      const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
      return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt, out);
    }

  if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
    {
      const switch_cfg_superedge *switch_sedge
	= as_a <const switch_cfg_superedge *> (&edge);
      return apply_constraints_for_gswitch (*switch_sedge, switch_stmt,
					    ctxt, out);
    }

  /* Apply any constraints due to an exception being thrown.  */
  if (const cfg_superedge *cfg_sedge = dyn_cast <const cfg_superedge *> (&edge))
    if (cfg_sedge->get_flags () & EDGE_EH)
      return apply_constraints_for_exception (last_stmt, ctxt, out);

  return true;
}

/* Push a new frame_region onto the stack region.
   Populate the frame_region with child regions for the function call's
   parameters, using values from the arguments at the callsite in the
   caller's frame.  */

void
region_model::update_for_gcall (const gcall *call_stmt,
				region_model_context *ctxt,
				function *callee)
{
  /* Build a vec of argument svalues, using the current top
     frame for resolving tree expressions.  */
  auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));

  for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
    {
      tree arg = gimple_call_arg (call_stmt, i);
      arg_svals.quick_push (get_rvalue (arg, ctxt));
    }

  if (!callee)
    {
      /* Get the function * from the gcall.  */
      tree fn_decl = get_fndecl_for_call (call_stmt, ctxt);
      callee = DECL_STRUCT_FUNCTION (fn_decl);
    }

  push_frame (callee, &arg_svals, ctxt);
}

/* Pop the top-most frame_region from the stack, and copy the return
   region's values (if any) into the region for the lvalue of the LHS of
   the call (if any).  */

void
region_model::update_for_return_gcall (const gcall *call_stmt,
				       region_model_context *ctxt)
{
  /* Get the lvalue for the result of the call, passing it to pop_frame,
     so that pop_frame can determine the region with respect to the
     *caller* frame.  */
  tree lhs = gimple_call_lhs (call_stmt);
  pop_frame (lhs, NULL, ctxt);
}

/* Extract calling information from the superedge and update the model for
   the call.  */

void
region_model::update_for_call_superedge (const call_superedge &call_edge,
					 region_model_context *ctxt)
{
  const gcall *call_stmt = call_edge.get_call_stmt ();
  update_for_gcall (call_stmt, ctxt, call_edge.get_callee_function ());
}

/* Extract calling information from the return superedge and update the model
   for the returning call.  */

void
region_model::update_for_return_superedge (const return_superedge &return_edge,
					   region_model_context *ctxt)
{
  const gcall *call_stmt = return_edge.get_call_stmt ();
  update_for_return_gcall (call_stmt, ctxt);
}

/* Update this region_model with a summary of the effect of calling
   and returning from CG_SEDGE.

   TODO: Currently this is extremely simplistic: we merely set the
   return value to "unknown".  A proper implementation would e.g. update
   sm-state, and presumably be reworked to support multiple outcomes.  */

void
region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
				       region_model_context *ctxt)
{
  /* For now, set any return value to "unknown".  */
  const gcall *call_stmt = cg_sedge.get_call_stmt ();
  tree lhs = gimple_call_lhs (call_stmt);
  if (lhs)
    mark_region_as_unknown (get_lvalue (lhs, ctxt),
			    ctxt ? ctxt->get_uncertainty () : NULL);

  // TODO: actually implement some kind of summary here

}

5159/* Given a true or false edge guarded by conditional statement COND_STMT,
5160 determine appropriate constraints for the edge to be taken.
5161
5162 If they are feasible, add the constraints and return true.
5163
5164 Return false if the constraints contradict existing knowledge
84fb3546
DM
5165 (and so the edge should not be taken).
5166 When returning false, if OUT is non-NULL, write a new rejected_constraint
5167 to it. */
757bf1df
DM
5168
5169bool
5170region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
5171 const gcond *cond_stmt,
84fb3546
DM
5172 region_model_context *ctxt,
5173 rejected_constraint **out)
757bf1df
DM
5174{
5175 ::edge cfg_edge = sedge.get_cfg_edge ();
5176 gcc_assert (cfg_edge != NULL);
5177 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
5178
5179 enum tree_code op = gimple_cond_code (cond_stmt);
5180 tree lhs = gimple_cond_lhs (cond_stmt);
5181 tree rhs = gimple_cond_rhs (cond_stmt);
5182 if (cfg_edge->flags & EDGE_FALSE_VALUE)
5183 op = invert_tree_comparison (op, false /* honor_nans */);
84fb3546 5184 return add_constraint (lhs, op, rhs, ctxt, out);
757bf1df
DM
5185}
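
/* For example (an illustrative sketch, not from an actual testcase):
   given "if (x > 5)", following the true edge adds the constraint
   "x > 5", whereas following the false edge inverts the comparison and
   adds "x <= 5" instead:

     // hypothetical: cond_stmt is the gcond for "if (x > 5)"
     model.apply_constraints_for_gcond (sedge, cond_stmt, ctxt, &rc);

   If "x <= 5" contradicts what is already known on this path, this
   returns false and the edge is treated as infeasible.  */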

/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
                                             const gswitch *switch_stmt,
                                             region_model_context *ctxt,
                                             rejected_constraint **out)
{
  bounded_ranges_manager *ranges_mgr = get_range_manager ();
  const bounded_ranges *all_cases_ranges
    = ranges_mgr->get_or_create_ranges_for_switch (&edge, switch_stmt);
  tree index = gimple_switch_index (switch_stmt);
  const svalue *index_sval = get_rvalue (index, ctxt);
  bool sat = m_constraints->add_bounded_ranges (index_sval, all_cases_ranges);
  if (!sat && out)
    *out = new rejected_ranges_constraint (*this, index, all_cases_ranges);
  if (sat && ctxt && !all_cases_ranges->empty_p ())
    ctxt->on_bounded_ranges (*index_sval, *all_cases_ranges);
  return sat;
}

/* Apply any constraints due to an exception being thrown at LAST_STMT.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).
   When returning false, if OUT is non-NULL, write a new rejected_constraint
   to it.  */

bool
region_model::apply_constraints_for_exception (const gimple *last_stmt,
                                               region_model_context *ctxt,
                                               rejected_constraint **out)
{
  gcc_assert (last_stmt);
  if (const gcall *call = dyn_cast <const gcall *> (last_stmt))
    if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
      if (is_named_call_p (callee_fndecl, "operator new", call, 1)
          || is_named_call_p (callee_fndecl, "operator new []", call, 1))
        {
          /* We have an exception thrown from operator new.
             Add a constraint that the result was NULL, to avoid a false
             leak report due to the result being lost when following
             the EH edge.  */
          if (tree lhs = gimple_call_lhs (call))
            return add_constraint (lhs, EQ_EXPR, null_pointer_node, ctxt, out);
          return true;
        }
  return true;
}
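
/* For instance, in C++ code such as (an illustrative sketch):

     void *p = ::operator new (n);  // EH edge taken: allocation threw

   following the EH edge would otherwise lose track of the allocated
   pointer and trigger a spurious leak report; constraining the result
   to NULL on that path suppresses the false positive.  */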

/* For use with push_frame when handling a top-level call within the analysis.
   PARAM has a defined but unknown initial value.
   Anything it points to has escaped, since the calling context "knows"
   the pointer, and thus calls to unknown functions could read/write into
   the region.  */

void
region_model::on_top_level_param (tree param,
                                  region_model_context *ctxt)
{
  if (POINTER_TYPE_P (TREE_TYPE (param)))
    {
      const region *param_reg = get_lvalue (param, ctxt);
      const svalue *init_ptr_sval
        = m_mgr->get_or_create_initial_value (param_reg);
      const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
      m_store.mark_as_escaped (pointee_reg);
    }
}
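
/* For example (a sketch): when analysis starts at

     void foo (int *p);

   "p" gets a defined-but-unknown initial pointer value, and the region
   it points to is marked as escaped, so a later call to an unknown
   function is assumed to be able to read or write *p.  */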

/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
                          region_model_context *ctxt)
{
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm), ++idx)
        {
          /* If there's a mismatching declaration, the call stmt might
             not have enough args.  Handle this case by leaving the
             rest of the params as uninitialized.  */
          if (idx >= arg_svals->length ())
            break;
          tree parm_lval = iter_parm;
          if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
            parm_lval = parm_default_ssa;
          const region *parm_reg = get_lvalue (parm_lval, ctxt);
          const svalue *arg_sval = (*arg_svals)[idx];
          set_value (parm_reg, arg_sval, ctxt);
        }

      /* Handle any variadic args.  */
      unsigned va_arg_idx = 0;
      for (; idx < arg_svals->length (); idx++, va_arg_idx++)
        {
          const svalue *arg_sval = (*arg_svals)[idx];
          const region *var_arg_reg
            = m_mgr->get_var_arg_region (m_current_frame,
                                         va_arg_idx);
          set_value (var_arg_reg, arg_sval, ctxt);
        }
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
         have defined but unknown initial values.
         Anything they point to has escaped.  */
      tree fndecl = fun->decl;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
           iter_parm = DECL_CHAIN (iter_parm))
        {
          if (tree parm_default_ssa = ssa_default_def (fun, iter_parm))
            on_top_level_param (parm_default_ssa, ctxt);
          else
            on_top_level_param (iter_parm, ctxt);
        }
    }

  return m_current_frame;
}
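
/* A minimal usage sketch (hypothetical; assumes "mgr", "ctxt" and a
   fndecl "child_fndecl" exist, in the style of the selftests below):

     region_model model (&mgr);
     const region *frame_reg
       = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl),
                           NULL, &ctxt);

   With a NULL ARG_SVALS this is a top-level call: each parameter gets
   its initial_svalue, and anything the pointer params point to is
   treated as having escaped.  */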

/* Get the function of the top-most frame in this region_model's stack.
   There must be such a frame.  */

function *
region_model::get_current_function () const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  return frame->get_function ();
}

/* Pop the topmost frame_region from this region_model's stack.

   If RESULT_LVALUE is non-null, copy any return value from the frame
   into the corresponding region (evaluated with respect to the *caller*
   frame, rather than the called frame).
   If OUT_RESULT is non-null, copy any return value from the frame
   into *OUT_RESULT.

   Purge the frame region and all its descendent regions.
   Convert any pointers that point into such regions into
   POISON_KIND_POPPED_STACK svalues.  */

void
region_model::pop_frame (tree result_lvalue,
                         const svalue **out_result,
                         region_model_context *ctxt)
{
  gcc_assert (m_current_frame);

  /* Evaluate the result, within the callee frame.  */
  const frame_region *frame_reg = m_current_frame;
  tree fndecl = m_current_frame->get_function ()->decl;
  tree result = DECL_RESULT (fndecl);
  const svalue *retval = NULL;
  if (result && TREE_TYPE (result) != void_type_node)
    {
      retval = get_rvalue (result, ctxt);
      if (out_result)
        *out_result = retval;
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  if (result_lvalue && retval)
    {
      /* Compute result_dst_reg using RESULT_LVALUE *after* popping
         the frame, but before poisoning pointers into the old frame.  */
      const region *result_dst_reg = get_lvalue (result_lvalue, ctxt);
      set_value (result_dst_reg, retval, ctxt);
    }

  unbind_region_and_descendents (frame_reg, POISON_KIND_POPPED_STACK);
}
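
/* Sketch of the push/pop pairing for an interprocedural call
   "lhs = fn (args...)" (hypothetical names):

     model.push_frame (DECL_STRUCT_FUNCTION (fn_decl), &arg_svals, ctxt);
     // ... analyze the body of fn ...
     model.pop_frame (lhs, NULL, ctxt);

   pop_frame evaluates DECL_RESULT within the callee frame, pops the
   frame, writes the return value to LHS in the *caller* frame, and
   only then poisons any remaining pointers into the popped frame.  */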

/* Get the number of frames in this region_model's stack.  */

int
region_model::get_stack_depth () const
{
  const frame_region *frame = get_current_frame ();
  if (frame)
    return frame->get_stack_depth ();
  else
    return 0;
}

/* Get the frame_region with the given index within the stack.
   The frame_region must exist.  */

const frame_region *
region_model::get_frame_at_index (int index) const
{
  const frame_region *frame = get_current_frame ();
  gcc_assert (frame);
  gcc_assert (index >= 0);
  gcc_assert (index <= frame->get_index ());
  while (index != frame->get_index ())
    {
      frame = frame->get_calling_frame ();
      gcc_assert (frame);
    }
  return frame;
}

/* Unbind svalues for any regions in REG and below.
   Find any pointers to such regions; convert them to
   poisoned values of kind PKIND.
   Also purge any dynamic extents.  */

void
region_model::unbind_region_and_descendents (const region *reg,
                                             enum poison_kind pkind)
{
  /* Gather a set of base regions to be unbound.  */
  hash_set<const region *> base_regs;
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *iter_base_reg = (*iter).first;
      if (iter_base_reg->descendent_of_p (reg))
        base_regs.add (iter_base_reg);
    }
  for (hash_set<const region *>::iterator iter = base_regs.begin ();
       iter != base_regs.end (); ++iter)
    m_store.purge_cluster (*iter);

  /* Find any pointers to REG or its descendents; convert to poisoned.  */
  poison_any_pointers_to_descendents (reg, pkind);

  /* Purge dynamic extents of any base regions in REG and below
     (e.g. VLAs and alloca stack regions).  */
  for (auto iter : m_dynamic_extents)
    {
      const region *iter_reg = iter.first;
      if (iter_reg->descendent_of_p (reg))
        unset_dynamic_extents (iter_reg);
    }
}

/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.  */

struct bad_pointer_finder
{
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
                      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
        const region *ptr_dst = ptr_sval->get_pointee ();
        /* Poison ptrs to descendents of REG, but not to REG itself,
           otherwise double-free detection doesn't work (since sm-state
           for "free" is stored on the original ptr svalue).  */
        if (ptr_dst->descendent_of_p (m_reg)
            && ptr_dst != m_reg)
          {
            sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
                                                         sval->get_type ());
            ++m_count;
          }
      }
  }

  const region *m_reg;
  enum poison_kind m_pkind;
  region_model_manager *const m_mgr;
  int m_count;
};

/* Find any pointers to REG or its descendents; convert them to
   poisoned values of kind PKIND.
   Return the number of pointers that were poisoned.  */

int
region_model::poison_any_pointers_to_descendents (const region *reg,
                                                  enum poison_kind pkind)
{
  bad_pointer_finder bv (reg, pkind, m_mgr);
  m_store.for_each_binding (bv);
  return bv.m_count;
}
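
/* This is what makes dangling stack pointers visible to later checks;
   e.g. in C code such as (an illustrative sketch):

     int *callee (void)
     {
       int x = 42;
       return &x;      // pointer into callee's frame
     }

   after pop_frame, any binding still pointing into the popped frame is
   rewritten to a POISON_KIND_POPPED_STACK poisoned_svalue, which
   diagnostics can then report on.  */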

/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
                                const program_point &point,
                                region_model *out_model,
                                const extrinsic_state *ext_state,
                                const program_state *state_a,
                                const program_state *state_b) const
{
  gcc_assert (out_model);
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model,
                  ext_state, state_a, state_b);

  if (!store::can_merge_p (&m_store, &other_model.m_store,
                           &out_model->m_store, m_mgr->get_store_manager (),
                           &m))
    return false;

  if (!m_dynamic_extents.can_merge_with_p (other_model.m_dynamic_extents,
                                           &out_model->m_dynamic_extents))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
                             *other_model.m_constraints,
                             out_model->m_constraints);

  return true;
}
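
/* A sketch of how a merge might be attempted at a join point
   (hypothetical names; "point" is the program_point of the join):

     region_model merged (&mgr);
     if (model_a.can_merge_with_p (model_b, point, &merged,
                                   NULL, NULL, NULL))
       {
         // use "merged" as the combined state
       }
     else
       {
         // keep the two states separate
       }

   The frames, stores, dynamic extents and constraints must all be
   mergeable for the models to be combined.  */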

/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
                                   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
        = fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
        {
          tree fn_decl = fn_reg->get_fndecl ();
          cgraph_node *node = cgraph_node::get (fn_decl);
          if (!node)
            return NULL_TREE;
          const cgraph_node *ultimate_node = node->ultimate_alias_target ();
          if (ultimate_node)
            return ultimate_node->decl;
        }
    }

  return NULL_TREE;
}
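
/* For example (a sketch): given

     void (*fp) (void) = &foo;
     fp ();

   if the model knows that "fp" points at foo's function_region, the
   indirect call resolves to foo's fndecl (following any aliases via
   the cgraph); otherwise NULL_TREE is returned and the call is treated
   as a call to an unknown function.  */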

/* Would be much simpler to use a lambda here, if it were supported.  */

struct append_regions_cb_data
{
  const region_model *model;
  auto_vec<const decl_region *> *out;
};

/* Populate *OUT with all decl_regions in the current
   frame that have clusters within the store.  */

void
region_model::
get_regions_for_current_frame (auto_vec<const decl_region *> *out) const
{
  append_regions_cb_data data;
  data.model = this;
  data.out = out;
  m_store.for_each_cluster (append_regions_cb, &data);
}

/* Implementation detail of get_regions_for_current_frame.  */

void
region_model::append_regions_cb (const region *base_reg,
                                 append_regions_cb_data *cb_data)
{
  if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
    return;
  if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
    cb_data->out->safe_push (decl_reg);
}


/* Abstract class for diagnostics related to the use of
   floating-point arithmetic where precision is needed.  */

class imprecise_floating_point_arithmetic : public pending_diagnostic
{
public:
  int get_controlling_option () const final override
  {
    return OPT_Wanalyzer_imprecise_fp_arithmetic;
  }
};

/* Concrete diagnostic to complain about uses of floating-point arithmetic
   in the size argument of malloc etc.  */

class float_as_size_arg : public imprecise_floating_point_arithmetic
{
public:
  float_as_size_arg (tree arg) : m_arg (arg)
  {}

  const char *get_kind () const final override
  {
    return "float_as_size_arg_diagnostic";
  }

  bool subclass_equal_p (const pending_diagnostic &other) const final override
  {
    return same_tree_p (m_arg, ((const float_as_size_arg &) other).m_arg);
  }

  bool emit (rich_location *rich_loc) final override
  {
    diagnostic_metadata m;
    bool warned = warning_meta (rich_loc, m, get_controlling_option (),
                                "use of floating-point arithmetic here might"
                                " yield unexpected results");
    if (warned)
      inform (rich_loc->get_loc (), "only use operands of an integer type"
                                    " inside the size argument");
    return warned;
  }

  label_text describe_final_event (const evdesc::final_event &ev) final
  override
  {
    if (m_arg)
      return ev.formatted_print ("operand %qE is of type %qT",
                                 m_arg, TREE_TYPE (m_arg));
    return ev.formatted_print ("at least one operand of the size argument is"
                               " of a floating-point type");
  }

private:
  tree m_arg;
};

/* Visitor to find uses of floating-point variables/constants in an svalue.  */

class contains_floating_point_visitor : public visitor
{
public:
  contains_floating_point_visitor (const svalue *root_sval) : m_result (NULL)
  {
    root_sval->accept (this);
  }

  const svalue *get_svalue_to_report ()
  {
    return m_result;
  }

  void visit_constant_svalue (const constant_svalue *sval) final override
  {
    /* At the point the analyzer runs, constant integer operands in a floating
       point expression are already implicitly converted to floating-points.
       Thus, we do prefer to report non-constants such that the diagnostic
       always reports a floating-point operand.  */
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type) && !m_result)
      m_result = sval;
  }

  void visit_conjured_svalue (const conjured_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

  void visit_initial_svalue (const initial_svalue *sval) final override
  {
    tree type = sval->get_type ();
    if (type && FLOAT_TYPE_P (type))
      m_result = sval;
  }

private:
  /* Non-null if at least one floating-point operand was found.  */
  const svalue *m_result;
};

/* May complain about uses of floating-point operands in SIZE_IN_BYTES.  */

void
region_model::check_dynamic_size_for_floats (const svalue *size_in_bytes,
                                             region_model_context *ctxt) const
{
  gcc_assert (ctxt);

  contains_floating_point_visitor v (size_in_bytes);
  if (const svalue *float_sval = v.get_svalue_to_report ())
    {
      tree diag_arg = get_representative_tree (float_sval);
      ctxt->warn (new float_as_size_arg (diag_arg));
    }
}
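
/* Code that would trigger this diagnostic (an illustrative sketch):

     void *
     alloc_elems (float n)
     {
       return malloc (n * 4);   // float arithmetic in the size argument
     }

   Rounding in the floating-point computation can silently under- or
   over-allocate, hence -Wanalyzer-imprecise-fp-arithmetic suggests
   using integer operands instead.  */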

/* Return a new region describing a heap-allocated block of memory.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_heap_alloc (const svalue *size_in_bytes,
                                            region_model_context *ctxt)
{
  const region *reg = m_mgr->create_region_for_heap_alloc ();
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}

/* Return a new region describing a block of memory allocated within the
   current frame.
   Use CTXT to complain about tainted sizes.  */

const region *
region_model::create_region_for_alloca (const svalue *size_in_bytes,
                                        region_model_context *ctxt)
{
  const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
  if (compat_types_p (size_in_bytes->get_type (), size_type_node))
    set_dynamic_extents (reg, size_in_bytes, ctxt);
  return reg;
}
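
/* A sketch of how a known-function implementation might use this when
   modeling "p = malloc (sz);" (hypothetical variable names):

     const svalue *size_sval = model.get_rvalue (sz_expr, ctxt);
     const region *new_reg
       = model.create_region_for_heap_alloc (size_sval, ctxt);
     const svalue *ptr_sval
       = mgr.get_ptr_svalue (ptr_type_node, new_reg);
     model.set_value (lhs_reg, ptr_sval, ctxt);

   The size is recorded as the region's dynamic extent, so later
   bounds-related checks can compare accesses against it.  */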

/* Record that the size of REG is SIZE_IN_BYTES.
   Use CTXT to complain about tainted sizes.  */

void
region_model::set_dynamic_extents (const region *reg,
                                   const svalue *size_in_bytes,
                                   region_model_context *ctxt)
{
  assert_compat_types (size_in_bytes->get_type (), size_type_node);
  if (ctxt)
    {
      check_dynamic_size_for_taint (reg->get_memory_space (), size_in_bytes,
                                    ctxt);
      check_dynamic_size_for_floats (size_in_bytes, ctxt);
    }
  m_dynamic_extents.put (reg, size_in_bytes);
}

/* Get the recorded size of REG in bytes, or NULL if no dynamic size was
   recorded.  */

const svalue *
region_model::get_dynamic_extents (const region *reg) const
{
  if (const svalue * const *slot = m_dynamic_extents.get (reg))
    return *slot;
  return NULL;
}

/* Unset any recorded dynamic size of REG.  */

void
region_model::unset_dynamic_extents (const region *reg)
{
  m_dynamic_extents.remove (reg);
}
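
/* Usage sketch (hypothetical, continuing the malloc-modeling sketch
   above):

     const svalue *extent = model.get_dynamic_extents (new_reg);
     // extent is the size_sval recorded earlier, or NULL if none

   A caller that invalidates the allocation (e.g. when modeling free or
   realloc) could drop the stale size via
   model.unset_dynamic_extents (new_reg).  */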

/* class noop_region_model_context : public region_model_context.  */

void
noop_region_model_context::add_note (pending_note *pn)
{
  delete pn;
}

void
noop_region_model_context::bifurcate (custom_edge_info *info)
{
  delete info;
}

void
noop_region_model_context::terminate_path ()
{
}

/* struct model_merger.  */

/* Dump a multiline representation of this merger to PP.  */

void
model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
{
  pp_string (pp, "model A:");
  pp_newline (pp);
  m_model_a->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "model B:");
  pp_newline (pp);
  m_model_b->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "merged model:");
  pp_newline (pp);
  m_merged_model->dump_to_pp (pp, simple, true);
  pp_newline (pp);
}

/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}

/* Dump a multiline representation of this merger to stderr.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}

/* Return true if it's OK to merge SVAL with other svalues.  */

bool
model_merger::mergeable_svalue_p (const svalue *sval) const
{
  if (m_ext_state)
    {
      /* Reject merging svalues that have non-purgable sm-state,
         to avoid falsely reporting memory leaks by merging them
         with something else.  For example, given a local var "p",
         reject the merger of a:
           store_a mapping "p" to a malloc-ed ptr
         with:
           store_b mapping "p" to a NULL ptr.  */
      if (m_state_a)
        if (!m_state_a->can_purge_p (*m_ext_state, sval))
          return false;
      if (m_state_b)
        if (!m_state_b->can_purge_p (*m_ext_state, sval))
          return false;
    }
  return true;
}

} // namespace ana

/* Dump RMODEL fully to stderr (i.e. without summarization).  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}
8ca7fa84 5884/* class rejected_op_constraint : public rejected_constraint. */
84fb3546
DM
5885
5886void
8ca7fa84 5887rejected_op_constraint::dump_to_pp (pretty_printer *pp) const
84fb3546
DM
5888{
5889 region_model m (m_model);
5890 const svalue *lhs_sval = m.get_rvalue (m_lhs, NULL);
5891 const svalue *rhs_sval = m.get_rvalue (m_rhs, NULL);
5892 lhs_sval->dump_to_pp (pp, true);
5893 pp_printf (pp, " %s ", op_symbol_code (m_op));
5894 rhs_sval->dump_to_pp (pp, true);
5895}
5896
8ca7fa84
DM
5897/* class rejected_ranges_constraint : public rejected_constraint. */
5898
5899void
5900rejected_ranges_constraint::dump_to_pp (pretty_printer *pp) const
5901{
5902 region_model m (m_model);
5903 const svalue *sval = m.get_rvalue (m_expr, NULL);
5904 sval->dump_to_pp (pp, true);
5905 pp_string (pp, " in ");
5906 m_ranges->dump_to_pp (pp, true);
5907}
5908
808f4dfe 5909/* class engine. */
757bf1df 5910
11a2ff8d
DM
5911/* engine's ctor. */
5912
4cebae09
DM
5913engine::engine (const supergraph *sg, logger *logger)
5914: m_sg (sg), m_mgr (logger)
11a2ff8d
DM
5915{
5916}
5917
808f4dfe 5918/* Dump the managed objects by class to LOGGER, and the per-class totals. */
757bf1df 5919
808f4dfe
DM
5920void
5921engine::log_stats (logger *logger) const
757bf1df 5922{
808f4dfe 5923 m_mgr.log_stats (logger, true);
757bf1df
DM
5924}
5925
75038aa6
DM
5926namespace ana {
5927
757bf1df
DM
5928#if CHECKING_P
5929
5930namespace selftest {
5931
/* Build a constant tree of the given type from STR.  */

static tree
build_real_cst_from_string (tree type, const char *str)
{
  REAL_VALUE_TYPE real;
  real_from_string (&real, str);
  return build_real (type, real);
}

/* Append various "interesting" constants to OUT (e.g. NaN).  */

static void
append_interesting_constants (auto_vec<tree> *out)
{
  out->safe_push (build_int_cst (integer_type_node, 0));
  out->safe_push (build_int_cst (integer_type_node, 42));
  out->safe_push (build_int_cst (unsigned_type_node, 0));
  out->safe_push (build_int_cst (unsigned_type_node, 42));
  out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
  out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
  out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
  out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
}

/* Verify that tree_cmp is a well-behaved comparator for qsort, even
   if the underlying constants aren't comparable.  */

static void
test_tree_cmp_on_constants ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  /* Try sorting every triple.  */
  const unsigned num = csts.length ();
  for (unsigned i = 0; i < num; i++)
    for (unsigned j = 0; j < num; j++)
      for (unsigned k = 0; k < num; k++)
        {
          auto_vec<tree> v (3);
          v.quick_push (csts[i]);
          v.quick_push (csts[j]);
          v.quick_push (csts[k]);
          v.qsort (tree_cmp);
        }
}

/* Implementation detail of the ASSERT_CONDITION_* macros.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  const svalue *lhs, tree_code op, const svalue *rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs);
  ASSERT_EQ_AT (loc, actual, expected);
}

/* Implementation detail of the ASSERT_CONDITION_* macros.  */

void
assert_condition (const location &loc,
                  region_model &model,
                  tree lhs, tree_code op, tree rhs,
                  tristate expected)
{
  tristate actual = model.eval_condition (lhs, op, rhs, NULL);
  ASSERT_EQ_AT (loc, actual, expected);
}

/* Implementation detail of ASSERT_DUMP_TREE_EQ.  */

static void
assert_dump_tree_eq (const location &loc, tree t, const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  dump_tree (&pp, t);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that dump_tree (T) is EXPECTED.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT

/* Implementation detail of ASSERT_DUMP_EQ.  */

static void
assert_dump_eq (const location &loc,
                const region_model &model,
                bool summarize,
                const char *expected)
{
  auto_fix_quotes sentinel;
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;

  model.dump_to_pp (&pp, summarize, true);
  ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
}

/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT

/* Smoketest for region_model::dump_to_pp.  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  ASSERT_DUMP_EQ (model, false,
                  "stack depth: 0\n"
                  "m_called_unknown_fn: FALSE\n"
                  "constraint_manager:\n"
                  "  equiv classes:\n"
                  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
                  "stack depth: 0\n"
                  "m_called_unknown_fn: FALSE\n"
                  "constraint_manager:\n"
                  "  equiv classes:\n"
                  "  constraints:\n");
}

/* Helper function for selftests.  Create a struct or union type named NAME,
   with the fields given by the FIELD_DECLS in FIELDS.
   If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
   create a UNION_TYPE.  */

static tree
make_test_compound_type (const char *name, bool is_struct,
                         const auto_vec<tree> *fields)
{
  tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
  TYPE_NAME (t) = get_identifier (name);
  TYPE_SIZE (t) = 0;

  tree fieldlist = NULL;
  int i;
  tree field;
  FOR_EACH_VEC_ELT (*fields, i, field)
    {
      gcc_assert (TREE_CODE (field) == FIELD_DECL);
      DECL_CONTEXT (field) = t;
      fieldlist = chainon (field, fieldlist);
    }
  fieldlist = nreverse (fieldlist);
  TYPE_FIELDS (t) = fieldlist;

  layout_type (t);
  return t;
}

/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  tree m_x_field;
  tree m_y_field;
  tree m_coord_type;
};

/* Verify usage of a struct.  */

static void
test_struct ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y".  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}

/* Verify usage of an array element.  */

static void
test_array_1 ()
{
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
                     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);
}

/* Verify that region_model::get_representative_tree works as expected.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
                         a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
                         a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}

/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
             model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}

/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}

/* Verify that initial_svalues are handled as expected.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
}

/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
                                      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
                                      x_init, y_init));
}

/* Verify that binops on constant svalues are folded.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
        if (i != j)
          ASSERT_NE (cst_sval[i], cst_sval[j]);
        if (i + j < NUM_CSTS)
          {
            const svalue *sum
              = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (sum, cst_sval[i + j]);
          }
        if (i - j >= 0)
          {
            const svalue *difference
              = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (difference, cst_sval[i - j]);
          }
        if (i * j < NUM_CSTS)
          {
            const svalue *product
              = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                                         cst_sval[i], cst_sval[j]);
            ASSERT_EQ (product, cst_sval[i * j]);
          }
        const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
                                                    cst_sval[i], cst_sval[j]);
        ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval[0]);
        const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
                                                     cst_sval[i], cst_sval[j]);
        ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval[0]);
        // etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding.  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
                               x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
                               x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);

  /* Verify various binops on booleans.  */
  {
    const svalue *sval_true = mgr.get_or_create_int_cst (boolean_type_node, 1);
    const svalue *sval_false = mgr.get_or_create_int_cst (boolean_type_node, 0);
    const svalue *sval_unknown
      = mgr.get_or_create_unknown_svalue (boolean_type_node);
    const placeholder_svalue sval_placeholder (boolean_type_node, "v");
    for (auto op : {BIT_IOR_EXPR, TRUTH_OR_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_true);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, &sval_placeholder),
                   &sval_placeholder);
      }
    for (auto op : {BIT_AND_EXPR, TRUTH_AND_EXPR})
      {
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_false, sval_unknown),
                   sval_false);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, sval_unknown),
                   sval_unknown);
        ASSERT_EQ (mgr.get_or_create_binop (boolean_type_node, op,
                                            sval_true, &sval_placeholder),
                   &sval_placeholder);
      }
  }
}

/* Verify that sub_svalues are folded as expected.  */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply
     yields an unknown.  */

  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
                                                    unknown, c_x_reg);
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}

/* Test that region::descendent_of_p works as expected.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}

/* Verify that bit_range_region works as expected.  */

static void
test_bit_range_regions ()
{
  tree x = build_global_decl ("x", integer_type_node);
  region_model_manager mgr;
  const region *x_reg = mgr.get_region_for_global (x);
  const region *byte0
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (0, 8));
  const region *byte1
    = mgr.get_bit_range (x_reg, char_type_node, bit_range (8, 8));
  ASSERT_TRUE (byte0->descendent_of_p (x_reg));
  ASSERT_TRUE (byte1->descendent_of_p (x_reg));
  ASSERT_NE (byte0, byte1);
}

/* Verify that simple assignments work as expected.  */

static void
test_assignment ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* "x == 0", then use of y, then "y = 0;".  */
  region_model_manager mgr;
  region_model model (&mgr);
  ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
  ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
  model.set_value (model.get_lvalue (y, NULL),
                   model.get_rvalue (int_0, NULL),
                   NULL);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
}

/* Verify that compound assignments work as expected.  */

static void
test_compound_assignment ()
{
  coord_test ct;

  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);
  tree d = build_global_decl ("d", ct.m_coord_type);
  tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     d, ct.m_x_field, NULL_TREE);
  tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     d, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Copy c to d.  */
  const svalue *sval = model.get_rvalue (c, NULL);
  model.set_value (model.get_lvalue (d, NULL), sval, NULL);

  /* Check that the fields have the same svalues.  */
  ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
  ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
}
6659
757bf1df
DM
6660/* Verify the details of pushing and popping stack frames. */
6661
6662static void
6663test_stack_frames ()
6664{
6665 tree int_42 = build_int_cst (integer_type_node, 42);
6666 tree int_10 = build_int_cst (integer_type_node, 10);
6667 tree int_5 = build_int_cst (integer_type_node, 5);
6668 tree int_0 = build_int_cst (integer_type_node, 0);
6669
6670 auto_vec <tree> param_types;
6671 tree parent_fndecl = make_fndecl (integer_type_node,
6672 "parent_fn",
6673 param_types);
6674 allocate_struct_function (parent_fndecl, true);
6675
6676 tree child_fndecl = make_fndecl (integer_type_node,
6677 "child_fn",
6678 param_types);
6679 allocate_struct_function (child_fndecl, true);
6680
6681 /* "a" and "b" in the parent frame. */
6682 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6683 get_identifier ("a"),
6684 integer_type_node);
4cebae09 6685 DECL_CONTEXT (a) = parent_fndecl;
757bf1df
DM
6686 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6687 get_identifier ("b"),
6688 integer_type_node);
4cebae09 6689 DECL_CONTEXT (b) = parent_fndecl;
757bf1df
DM
6690 /* "x" and "y" in a child frame. */
6691 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6692 get_identifier ("x"),
6693 integer_type_node);
4cebae09 6694 DECL_CONTEXT (x) = child_fndecl;
757bf1df
DM
6695 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
6696 get_identifier ("y"),
6697 integer_type_node);
4cebae09 6698 DECL_CONTEXT (y) = child_fndecl;
757bf1df
DM
6699
6700 /* "p" global. */
6701 tree p = build_global_decl ("p", ptr_type_node);
6702
6703 /* "q" global. */
6704 tree q = build_global_decl ("q", ptr_type_node);
6705
808f4dfe 6706 region_model_manager mgr;
757bf1df 6707 test_region_model_context ctxt;
808f4dfe 6708 region_model model (&mgr);
757bf1df
DM
6709
6710 /* Push stack frame for "parent_fn". */
808f4dfe
DM
6711 const region *parent_frame_reg
6712 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
6713 NULL, &ctxt);
6714 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
6715 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
6716 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
6717 model.set_value (a_in_parent_reg,
6718 model.get_rvalue (int_42, &ctxt),
6719 &ctxt);
6720 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
6721
757bf1df
DM
  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn". */
  const region *child_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
		   model.get_rvalue (int_0, &ctxt),
		   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

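  /* Add a constraint on "y" within the child frame: "y != 5". */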
  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame: p = &x. */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
		   &ctxt);
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);

  /* Point another global pointer at p: q = &p. */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
		   &ctxt);

  /* Test region::descendent_of_p. */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack. */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned. */
  const svalue *new_p_sval = model.get_rvalue (p, NULL);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
	     POISON_KIND_POPPED_STACK);

  /* Verify that q still points to p, in spite of the region
     renumbering. */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->maybe_get_region (),
	     model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated. */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame. */
  /* Verify "a" still has its value. */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
	     int_42);
  /* Verify "b" still has its constraint. */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));
}

/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack. */

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n". */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);
  DECL_CONTEXT (n) = fndecl;

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param. */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth. */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top. */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues. */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
    }
}

/* Ensure that region_model::operator== works as expected. */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other. */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal. */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor. */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model. */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}

/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal. */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
		    model0.get_rvalue (int_42, NULL),
		    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
		    model0.get_rvalue (int_113, NULL),
		    NULL);

  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
		    model1.get_rvalue (int_113, NULL),
		    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
		    model1.get_rvalue (int_42, NULL),
		    NULL);

  ASSERT_EQ (model0, model1);
}

/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization. */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}

/* Verify that we can canonicalize a model containing NaN and other real
   constants. */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  for (tree cst : csts)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}

/* Assert that, if we have two region_model instances
   with values VAL_A and VAL_B for EXPR, they are
   mergeable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model. */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  region_model_manager *mgr = out_merged_model->get_manager ();
  program_point point (program_point::origin (*mgr));
  test_region_model_context ctxt;
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable. */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}

/* Verify that we can merge region_model instances. */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a". */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  DECL_CONTEXT (a) = test_fndecl;
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer. */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);
  DECL_CONTEXT (q) = test_fndecl;

  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged. */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global. */
  /* TODO: verify that the merged model doesn't have a value for
     the global */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value. */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value. */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values. */

  /* Two equal constant values. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial. */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);

    /* In particular, there should be an unknown value for "x". */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant. */
  // TODO

  /* Pointers: NULL and NULL. */
  // TODO

  /* Pointers: NULL and non-NULL. */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local. */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global. */
  {
    region_model merged (&mgr);
    /* p == &y in both input models. */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model. */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown. */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int". */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model. */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (size_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg
      = model0.create_region_for_heap_alloc (size_sval, &ctxt);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical. */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y. */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

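  /* Verify that we can merge models with contradictory constraints on
     a global ("x == 42" vs "x != 42"). */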
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

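  /* Likewise, where one side has the additional constraint "x == 113". */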
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views. */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
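    /* Get a view of "x" as a pointer, via a "cast" region. */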
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same. */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame. */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame. */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame. */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame. */
    const region *reg_pointee = sval_ptr->maybe_get_region ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least). */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global. */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least). */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}

/* Verify that constraints are correctly merged when merging region_model
   instances. */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n. */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n). */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n). */
  program_point point (program_point::origin (mgr));
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}

/* Verify that widening_svalue::eval_condition_without_cm works as
   expected. */

static void
test_widening_constraints ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);
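  /* Check each comparison op against the constants -1, 0, 1 and 256. */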
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}

/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0} [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
	 [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
       T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
       F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  region_model_manager mgr;
  program_point point (program_point::origin (mgr));

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  test_region_model_context ctxt;

  /* model0: i: 0. */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1. */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value. */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256. */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state. */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges. */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case. */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged. */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;" */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state. */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again. */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}

/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL. */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (size_type_node);
  const region *reg = model.create_region_for_heap_alloc (size_in_bytes, NULL);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
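  /* "q = p;" (where "q" has type "char *"). */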
  model.set_value (q, p, NULL);

  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}

/* Smoketest of getting and setting the value of a variable. */

static void
test_var ()
{
  /* "int i;" */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);

  const region *i_reg = model.get_lvalue (i, NULL);
  ASSERT_EQ (i_reg->get_kind (), RK_DECL);

  /* Reading "i" should give a symbolic "initial value". */
  const svalue *sval_init = model.get_rvalue (i, NULL);
  ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
  ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
  /* ...and doing it again should give the same "initial value". */
  ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);

  /* "i = 17;". */
  model.set_value (i, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_17, NULL));

  /* "i = -3;". */
  model.set_value (i, int_m3, NULL);
  ASSERT_EQ (model.get_rvalue (i, NULL),
	     model.get_rvalue (int_m3, NULL));

  /* Verify get_offset for "i". */
  {
    region_offset offset = i_reg->get_offset (&mgr);
    ASSERT_EQ (offset.get_base_region (), i_reg);
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }
}

7656test_array_2 ()
7657{
7658 /* "int arr[10];" */
7659 tree tlen = size_int (10);
7660 tree arr_type
7661 = build_array_type (integer_type_node, build_index_type (tlen));
7662 tree arr = build_global_decl ("arr", arr_type);
7663
7664 /* "int i;" */
7665 tree i = build_global_decl ("i", integer_type_node);
7666
7667 tree int_0 = build_int_cst (integer_type_node, 0);
7668 tree int_1 = build_int_cst (integer_type_node, 1);
7669
7670 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
7671 arr, int_0, NULL_TREE, NULL_TREE);
7672 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
7673 arr, int_1, NULL_TREE, NULL_TREE);
7674 tree arr_i = build4 (ARRAY_REF, integer_type_node,
7675 arr, i, NULL_TREE, NULL_TREE);
7676
7677 tree int_17 = build_int_cst (integer_type_node, 17);
7678 tree int_42 = build_int_cst (integer_type_node, 42);
7679 tree int_m3 = build_int_cst (integer_type_node, -3);
7680
7681 region_model_manager mgr;
7682 region_model model (&mgr);
7683 /* "arr[0] = 17;". */
7684 model.set_value (arr_0, int_17, NULL);
7685 /* "arr[1] = -3;". */
7686 model.set_value (arr_1, int_m3, NULL);
7687
7688 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
7689 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
7690
7691 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
7692 model.set_value (arr_1, int_42, NULL);
7693 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
7694
7695 /* Verify get_offset for "arr[0]". */
7696 {
7697 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
7a6564c9 7698 region_offset offset = arr_0_reg->get_offset (&mgr);
808f4dfe
DM
7699 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7700 ASSERT_EQ (offset.get_bit_offset (), 0);
7701 }
7702
7703 /* Verify get_offset for "arr[1]". */
7704 {
7705 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
7a6564c9 7706 region_offset offset = arr_1_reg->get_offset (&mgr);
808f4dfe
DM
7707 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7708 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
7709 }
7710
7a6564c9
TL
7711 /* Verify get_offset for "arr[i]". */
7712 {
7713 const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
7714 region_offset offset = arr_i_reg->get_offset (&mgr);
7715 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
7716 ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
7717 }
7718
808f4dfe
DM
7719 /* "arr[i] = i;" - this should remove the earlier bindings. */
7720 model.set_value (arr_i, i, NULL);
7721 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
7722 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
7723
7724 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
7725 model.set_value (arr_0, int_17, NULL);
7726 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
7727 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
7728}

/* Smoketest of dereferencing a pointer via MEM_REF. */

static void
test_mem_ref ()
{
  /*
    x = 17;
    p = &x;
    *p;
  */
  tree x = build_global_decl ("x", integer_type_node);
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);

  region_model_manager mgr;
  region_model model (&mgr);

  /* "x = 17;". */
  model.set_value (x, int_17, NULL);

  /* "p = &x;". */
  model.set_value (p, addr_of_x, NULL);

  const svalue *sval = model.get_rvalue (star_p, NULL);
  ASSERT_EQ (sval->maybe_get_constant (), int_17);
}

/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
   Analogous to this code:
     void test_6 (int a[10])
     {
       __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       a[3] = 42;
       __analyzer_eval (a[3] == 42); [should be TRUE]
     }
   from data-model-1.c, which looks like this at the gimple level:
       # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
       int *_1 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _2 = *_1;             # MEM_REF
       _Bool _3 = _2 == 42;
       int _4 = (int) _3;
       __analyzer_eval (_4);

       # a[3] = 42;
       int *_5 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       *_5 = 42;                 # MEM_REF

       # __analyzer_eval (a[3] == 42); [should be TRUE]
       int *_6 = a_10(D) + 12;   # POINTER_PLUS_EXPR
       int _7 = *_6;             # MEM_REF
       _Bool _8 = _7 == 42;
       int _9 = (int) _8;
       __analyzer_eval (_9); */

static void
test_POINTER_PLUS_EXPR_then_MEM_REF ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree a = build_global_decl ("a", int_star);
  tree offset_12 = build_int_cst (size_type_node, 12);
  tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
  tree offset_0 = build_int_cst (integer_type_node, 0);
  tree mem_ref = build2 (MEM_REF, integer_type_node,
			 pointer_plus_expr, offset_0);
  region_model_manager mgr;
  region_model m (&mgr);

  tree int_42 = build_int_cst (integer_type_node, 42);
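  /* "a[3] = 42;", i.e. "*_5 = 42;" in the gimple above. */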
  m.set_value (mem_ref, int_42, NULL);
  ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
}

/* Verify that malloc works. */

static void
test_malloc ()
{
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* "p = malloc (n * 4);". */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_heap_alloc (size_sval, &ctxt);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
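  /* Verify that the created region has the requested capacity. */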
  ASSERT_EQ (model.get_capacity (reg), size_sval);
}

/* Verify that alloca works. */

static void
test_alloca ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "test_fn",
			     param_types);
  allocate_struct_function (fndecl, true);

  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree n = build_global_decl ("n", integer_type_node);
  tree n_times_4 = build2 (MULT_EXPR, size_type_node,
			   n, build_int_cst (size_type_node, 4));

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame. */
  const region *frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
			NULL, &ctxt);
  /* "p = alloca (n * 4);". */
  const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
  const region *reg = model.create_region_for_alloca (size_sval, &ctxt);
  ASSERT_EQ (reg->get_parent_region (), frame_reg);
  const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
  model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
  ASSERT_EQ (model.get_capacity (reg), size_sval);

  /* Verify that the pointers to the alloca region are replaced by
     poisoned values when the frame is popped. */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_EQ (model.get_rvalue (p, NULL)->get_kind (), SK_POISONED);
}

/* Verify that svalue::involves_p works. */

static void
test_involves_p ()
{
  region_model_manager mgr;
  tree int_star = build_pointer_type (integer_type_node);
  tree p = build_global_decl ("p", int_star);
  tree q = build_global_decl ("q", int_star);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *p_init = model.get_rvalue (p, &ctxt);
  const svalue *q_init = model.get_rvalue (q, &ctxt);

  ASSERT_TRUE (p_init->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (q_init));

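  /* Build symbolic regions for "*p" and "*q", and their initial values. */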
  const region *star_p_reg = mgr.get_symbolic_region (p_init);
  const region *star_q_reg = mgr.get_symbolic_region (q_init);

  const svalue *init_star_p = mgr.get_or_create_initial_value (star_p_reg);
  const svalue *init_star_q = mgr.get_or_create_initial_value (star_q_reg);

  ASSERT_TRUE (init_star_p->involves_p (p_init));
  ASSERT_FALSE (p_init->involves_p (init_star_p));
  ASSERT_FALSE (init_star_p->involves_p (q_init));
  ASSERT_TRUE (init_star_q->involves_p (q_init));
  ASSERT_FALSE (init_star_q->involves_p (p_init));
}

/* Run all of the selftests within this file. */

void
analyzer_region_model_cc_tests ()
{
  test_tree_cmp_on_constants ();
  test_dump ();
  test_struct ();
  test_array_1 ();
  test_get_representative_tree ();
  test_unique_constants ();
  test_unique_unknowns ();
  test_initial_svalue_folding ();
  test_unaryop_svalue_folding ();
  test_binop_svalue_folding ();
  test_sub_svalue_folding ();
  test_descendent_of_p ();
  test_bit_range_regions ();
  test_assignment ();
  test_compound_assignment ();
  test_stack_frames ();
  test_get_representative_path_var ();
  test_equality_1 ();
  test_canonicalization_2 ();
  test_canonicalization_3 ();
  test_canonicalization_4 ();
  test_state_merging ();
  test_constraint_merging ();
  test_widening_constraints ();
  test_iteration_1 ();
  test_malloc_constraints ();
  test_var ();
  test_array_2 ();
  test_mem_ref ();
  test_POINTER_PLUS_EXPR_then_MEM_REF ();
  test_malloc ();
  test_alloca ();
  test_involves_p ();
}

} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */