]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
analyzer: fix ICE on unhandled tree codes in gassign [PR96640]
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df
DM
1/* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
7892ff37 29#include "diagnostic-core.h"
757bf1df
DM
30#include "graphviz.h"
31#include "options.h"
32#include "cgraph.h"
33#include "tree-dfa.h"
34#include "stringpool.h"
35#include "convert.h"
36#include "target.h"
37#include "fold-const.h"
38#include "tree-pretty-print.h"
39#include "diagnostic-color.h"
40#include "diagnostic-metadata.h"
757bf1df 41#include "tristate.h"
ef7827b0 42#include "bitmap.h"
757bf1df
DM
43#include "selftest.h"
44#include "function.h"
45#include "analyzer/analyzer.h"
46#include "analyzer/analyzer-logging.h"
47#include "ordered-hash-map.h"
48#include "options.h"
49#include "cgraph.h"
50#include "cfg.h"
51#include "digraph.h"
52#include "analyzer/supergraph.h"
53#include "sbitmap.h"
808f4dfe
DM
54#include "analyzer/call-string.h"
55#include "analyzer/program-point.h"
56#include "analyzer/store.h"
757bf1df
DM
57#include "analyzer/region-model.h"
58#include "analyzer/constraint-manager.h"
59#include "diagnostic-event-id.h"
60#include "analyzer/sm.h"
61#include "diagnostic-event-id.h"
62#include "analyzer/sm.h"
63#include "analyzer/pending-diagnostic.h"
808f4dfe 64#include "analyzer/region-model-reachability.h"
757bf1df 65#include "analyzer/analyzer-selftests.h"
884d9141 66#include "stor-layout.h"
757bf1df
DM
67
68#if ENABLE_ANALYZER
69
75038aa6
DM
70namespace ana {
71
757bf1df
DM
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  Uses TDF_SLIM so that e.g. types are printed tersely.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
80
808f4dfe
DM
/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.
   The quotes are colorized (or not) to match PP's color setting.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}
91
808f4dfe
DM
/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  /* Emit the quotes and the type directly, rather than going through
     pp_printf's format machinery, to avoid the reentrancy issue above.  */
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
110
757bf1df
DM
111/* class region_model. */
112
808f4dfe 113/* Ctor for region_model: construct an "empty" model. */
757bf1df 114
808f4dfe
DM
115region_model::region_model (region_model_manager *mgr)
116: m_mgr (mgr), m_store (), m_current_frame (NULL)
757bf1df 117{
808f4dfe 118 m_constraints = new constraint_manager (mgr);
757bf1df
DM
119}
120
/* region_model's copy ctor.
   Deep-copies the constraint_manager (which this model owns); the
   store is copied by value and the frame pointer is shared.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame)
{
}
129
/* region_model's dtor.  Releases the owned constraint_manager
   (allocated in the ctors and in operator=).  */

region_model::~region_model ()
{
  delete m_constraints;
}
136
/* region_model's assignment operator.
   Both models must share the same manager; the constraint_manager is
   replaced with a fresh deep copy of OTHER's.  */

region_model &
region_model::operator= (const region_model &other)
{
  /* m_mgr is const.  */
  gcc_assert (m_mgr == other.m_mgr);

  m_store = other.m_store;

  /* Delete the old constraints before copying, since m_constraints is
     an owning pointer.  */
  delete m_constraints;
  m_constraints = new constraint_manager (*other.m_constraints);

  m_current_frame = other.m_current_frame;

  return *this;
}
154
/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  /* Sanity-check: equal models must hash equally (only checked in
     checking builds).  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
180
181/* Generate a hash value for this region_model. */
182
183hashval_t
808f4dfe
DM
184region_model::hash () const
185{
186 hashval_t result = m_store.hash ();
187 result ^= m_constraints->hash ();
188 return result;
757bf1df
DM
189}
190
808f4dfe
DM
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.
   If MULTILINE, emit one item per line; otherwise emit a compact
   single-line form using "{...}" groupings.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack.  Frames are walked innermost-first via their
     calling-frame links.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");
}
757bf1df 238
/* Dump a representation of this model to FILE.
   Sets up a pretty_printer that streams directly to FP, inheriting the
   color setting from the global diagnostic context.  */

void
region_model::dump (FILE *fp, bool simple, bool multiline) const
{
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple, multiline);
  pp_newline (&pp);
  pp_flush (&pp);
}
252
/* Dump a multiline representation of this model to stderr.  */

DEBUG_FUNCTION void
region_model::dump (bool simple) const
{
  dump (stderr, simple, true);
}
757bf1df 260
/* Dump a multiline representation of this model to stderr.
   Convenience entry point for use from the debugger.  */

DEBUG_FUNCTION void
region_model::debug () const
{
  dump (true);
}
268
808f4dfe
DM
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.
   Should be called before comparing models with operator==.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
278
279/* Return true if this region_model is in canonical form. */
280
281bool
282region_model::canonicalized_p () const
283{
284 region_model copy (*this);
808f4dfe 285 copy.canonicalize ();
757bf1df
DM
286 return *this == copy;
287}
288
808f4dfe
DM
/* See the comment for store::loop_replay_fixup.
   Delegates to the store, passing DST_STATE's store as the target.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
296
757bf1df
DM
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values (e.g. use-after-free, or pointers into a popped
   stack frame).  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  /* EXPR is the tree for the poisoned value; PKIND says how it became
     poisoned.  */
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
  : m_expr (expr), m_pkind (pkind)
  {}

  const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }

  /* Deduplication: two instances are "the same" if they complain about
     the same expression (the poison kind is deliberately ignored).  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return m_expr == other.m_expr;
  }

  /* Emit the warning appropriate to the poison kind.  */
  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_after_free,
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc,
	     OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  /* Wording for the final event in the diagnostic path, matching the
     warning emitted above.  */
  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

private:
  tree m_expr;              /* The poisoned expression.  */
  enum poison_kind m_pkind; /* How it became poisoned.  */
};
364
808f4dfe
DM
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL.

   Note the explicit case coverage below: any tree code not listed falls
   through to the NULL return, letting the caller (on_assignment) handle
   it without ICEing (see PR 96640).  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Unhandled tree code: not modelled here.  */
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);

	// TODO: constraints between svalues
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	/* If the comparison is decidable under the current constraints,
	   fold it to a boolean constant; otherwise keep it symbolic.  */
	tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	if (t.is_known ())
	  return get_rvalue (t.is_true ()
			     ? boolean_true_node
			     : boolean_false_node,
			     ctxt);
	else
	  {
	    // TODO: symbolic value for binop
	    const svalue *sval_binop
	      = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					    rhs1_sval, rhs2_sval);
	    return sval_binop;
	  }
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
548
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Deliberately don't ICE (or even sorry) on unhandled tree
	   codes; fall back to an unknown value.  The "if (0)" keeps the
	   sorry_at call around for debugging (see PR 96640).  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		/* Elements without an explicit index use their position
		   in the constructor.  */
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	/* Add a default binding, rather than a direct one, so that array
	   access will "inherit" the individual chars.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   BK_default);
      }
      break;
    }
}
631
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  bool unknown_side_effects = false;

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);

      /* The various impl_call_* member functions are implemented
	 in region-model-impl-calls.cc.
	 Having them split out into separate functions makes it easier
	 to put breakpoints on the handling of specific functions.  */
      if (is_named_call_p (callee_fndecl, "malloc", call, 1))
	return impl_call_malloc (cd);
      else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
	return impl_call_calloc (cd);
      else if (is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
	return impl_call_alloca (cd);
      else if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
	       || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
	       || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
	return impl_call_builtin_expect (cd);
      else if (is_named_call_p (callee_fndecl, "memset", call, 3)
	       || gimple_call_builtin_p (call, BUILT_IN_MEMSET))
	{
	  impl_call_memset (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "strlen", call, 1))
	{
	  /* impl_call_strlen returns false if it couldn't model the
	     call, in which case we fall through to the default lhs
	     handling below.  */
	  if (impl_call_strlen (cd))
	    return false;
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && !DECL_PURE_P (callee_fndecl)
	       && !fndecl_built_in_p (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  /* Some of the above cases update the lhs of the call based on the
     return value.  If we get here, it hasn't been done yet, so do that
     now.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  const svalue *sval = m_mgr->get_or_create_initial_value (lhs_region);
	  set_value (lhs_region, sval, ctxt);
	}
    }

  return unknown_side_effects;
}
700
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  /* "free" is handled here (post) rather than in on_call_pre so that
     the malloc sm-state is updated first.  */
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    if (is_named_call_p (callee_fndecl, "free", call, 1))
      {
	call_details cd (call, this, ctxt);
	impl_call_free (cd);
	return;
      }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
727
ef7827b0
DM
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.

   NOTE(review): CTXT is dereferenced below without a NULL check —
   presumably all callers pass a non-NULL context here; verify.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  reachable_regions reachable_regs (&m_store, m_mgr);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      ctxt->on_unknown_change (sval, true);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters ();

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());
}
ef7827b0 797
808f4dfe
DM
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval)
{
  reachable_regions reachable_regs (&m_store, m_mgr);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Get regions for locals that have explicitly bound values:
     any base region whose parent is a frame region is a local.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
837
/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics: copy the returned value (if any) into the function's
   DECL_RESULT.  */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  /* A "return;" with no value, or a function with no DECL_RESULT,
     needs no model update.  */
  if (lhs && rhs)
    copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt);
}
851
342e14ff
DM
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/sigsetjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  /* The first argument is the jmp_buf; dereference it to get the
     region being written to.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      tree zero = build_int_cst (TREE_TYPE (lhs), 0);
      const svalue *new_sval = m_mgr->get_or_create_constant_svalue (zero);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
885
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      tree t_zero = build_int_cst (TREE_TYPE (fake_retval), 0);
      const svalue *zero_sval = m_mgr->get_or_create_constant_svalue (t_zero);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  tree t_one = build_int_cst (TREE_TYPE (fake_retval), 1);
	  const svalue *one_sval
	    = m_mgr->get_or_create_constant_svalue (t_one);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph betweeen the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
939
/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Notifies CTXT (if non-NULL) via on_phi.  */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  set_value (get_lvalue (lhs, ctxt), rhs_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
962
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt)
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unhandled tree codes get a placeholder region rather than
	 an ICE, so the analysis can continue.  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	/* ARRAY[INDEX]: an element region within the array's region,
	   using the array's element type.  */
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case MEM_REF:
      {
	/* *(PTR + OFFSET): dereference PTR, then apply the byte
	   offset within the pointed-to region.  */
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	/* A local: look it up within the frame selected by the
	   path_var's stack depth.  */
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr);
      }

    case COMPONENT_REF:
      {
	/* obj.field: a field region within the object's region.  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
1049
1050/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
1051
09bea584
DM
1052static void
1053assert_compat_types (tree src_type, tree dst_type)
1054{
1055 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
1056 {
1057#if CHECKING_P
1058 if (!(useless_type_conversion_p (src_type, dst_type)))
1059 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
1060#endif
1061 }
09bea584 1062}
757bf1df 1063
808f4dfe 1064/* Get the region for PV within this region_model,
757bf1df
DM
1065 emitting any diagnostics to CTXT. */
1066
808f4dfe 1067const region *
757bf1df
DM
1068region_model::get_lvalue (path_var pv, region_model_context *ctxt)
1069{
1070 if (pv.m_tree == NULL_TREE)
808f4dfe 1071 return NULL;
757bf1df 1072
808f4dfe
DM
1073 const region *result_reg = get_lvalue_1 (pv, ctxt);
1074 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
1075 return result_reg;
757bf1df
DM
1076}
1077
808f4dfe 1078/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
1079 recent stack frame if it's a local). */
1080
808f4dfe 1081const region *
757bf1df
DM
1082region_model::get_lvalue (tree expr, region_model_context *ctxt)
1083{
1084 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1085}
1086
1087/* Implementation of region_model::get_rvalue; the latter adds type-checking.
1088
1089 Get the value of PV within this region_model,
1090 emitting any diagnostics to CTXT. */
1091
808f4dfe 1092const svalue *
757bf1df
DM
1093region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt)
1094{
1095 gcc_assert (pv.m_tree);
1096
1097 switch (TREE_CODE (pv.m_tree))
1098 {
1099 default:
808f4dfe 1100 gcc_unreachable ();
757bf1df
DM
1101
1102 case ADDR_EXPR:
1103 {
1104 /* "&EXPR". */
1105 tree expr = pv.m_tree;
1106 tree op0 = TREE_OPERAND (expr, 0);
808f4dfe
DM
1107 const region *expr_reg = get_lvalue (op0, ctxt);
1108 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
757bf1df
DM
1109 }
1110 break;
1111
808f4dfe
DM
1112 case BIT_FIELD_REF:
1113 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
1114
1115 case SSA_NAME:
1116 case VAR_DECL:
1117 case PARM_DECL:
1118 case RESULT_DECL:
757bf1df
DM
1119 case ARRAY_REF:
1120 {
808f4dfe
DM
1121 const region *element_reg = get_lvalue (pv, ctxt);
1122 return get_store_value (element_reg);
757bf1df
DM
1123 }
1124
808f4dfe
DM
1125 case REALPART_EXPR:
1126 case IMAGPART_EXPR:
1127 case VIEW_CONVERT_EXPR:
1128 {
1129 tree expr = pv.m_tree;
1130 tree arg = TREE_OPERAND (expr, 0);
1131 const svalue *arg_sval = get_rvalue (arg, ctxt);
1132 const svalue *sval_unaryop
1133 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
1134 arg_sval);
1135 return sval_unaryop;
1136 };
1137
757bf1df
DM
1138 case INTEGER_CST:
1139 case REAL_CST:
808f4dfe
DM
1140 case COMPLEX_CST:
1141 case VECTOR_CST:
757bf1df 1142 case STRING_CST:
808f4dfe
DM
1143 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
1144
1145 case POINTER_PLUS_EXPR:
1146 {
1147 tree expr = pv.m_tree;
1148 tree ptr = TREE_OPERAND (expr, 0);
1149 tree offset = TREE_OPERAND (expr, 1);
1150 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1151 const svalue *offset_sval = get_rvalue (offset, ctxt);
1152 const svalue *sval_binop
1153 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
1154 ptr_sval, offset_sval);
1155 return sval_binop;
1156 }
1157
1158 /* Binary ops. */
1159 case PLUS_EXPR:
1160 case MULT_EXPR:
1161 {
1162 tree expr = pv.m_tree;
1163 tree arg0 = TREE_OPERAND (expr, 0);
1164 tree arg1 = TREE_OPERAND (expr, 1);
1165 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
1166 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
1167 const svalue *sval_binop
1168 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
1169 arg0_sval, arg1_sval);
1170 return sval_binop;
1171 }
757bf1df
DM
1172
1173 case COMPONENT_REF:
1174 case MEM_REF:
757bf1df 1175 {
808f4dfe
DM
1176 const region *ref_reg = get_lvalue (pv, ctxt);
1177 return get_store_value (ref_reg);
757bf1df
DM
1178 }
1179 }
1180}
1181
1182/* Get the value of PV within this region_model,
1183 emitting any diagnostics to CTXT. */
1184
808f4dfe 1185const svalue *
757bf1df
DM
1186region_model::get_rvalue (path_var pv, region_model_context *ctxt)
1187{
1188 if (pv.m_tree == NULL_TREE)
808f4dfe 1189 return NULL;
757bf1df 1190
808f4dfe 1191 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 1192
808f4dfe
DM
1193 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
1194
1195 return result_sval;
757bf1df
DM
1196}
1197
1198/* Get the value of EXPR within this region_model (assuming the most
1199 recent stack frame if it's a local). */
1200
808f4dfe 1201const svalue *
757bf1df
DM
1202region_model::get_rvalue (tree expr, region_model_context *ctxt)
1203{
1204 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1205}
1206
808f4dfe
DM
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.  */

const svalue *
region_model::get_store_value (const region *reg) const
{
  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  /* First choice: an explicit binding in the store.  */
  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      /* Ensure the value is seen through REG's type.  */
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	/* Offset zero, i.e. the first character.  */
	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, we instead change
     the default here so we implicitly have an unknown value for such
     regions.  */
  if (m_store.called_unknown_fn_p ())
    if (reg->get_base_region ()->get_parent_region ()->get_kind ()
	== RK_GLOBALS)
      return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  return m_mgr->get_or_create_initial_value (reg);
}
1270
808f4dfe
DM
1271/* Return false if REG does not exist, true if it may do.
1272 This is for detecting regions within the stack that don't exist anymore
1273 after frames are popped. */
757bf1df 1274
808f4dfe
DM
1275bool
1276region_model::region_exists_p (const region *reg) const
757bf1df 1277{
808f4dfe
DM
1278 /* If within a stack frame, check that the stack frame is live. */
1279 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 1280 {
808f4dfe
DM
1281 /* Check that the current frame is the enclosing frame, or is called
1282 by it. */
1283 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
1284 iter_frame = iter_frame->get_calling_frame ())
1285 if (iter_frame == enclosing_frame)
1286 return true;
1287 return false;
757bf1df 1288 }
808f4dfe
DM
1289
1290 return true;
757bf1df
DM
1291}
1292
808f4dfe
DM
/* Get a region for referencing PTR_SVAL, creating a region if need be, and
   potentially generating warnings via CTXT.
   PTR_TREE if non-NULL can be used when emitting diagnostics.  */

const region *
region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
			    region_model_context *ctxt)
{
  gcc_assert (ptr_sval);

  switch (ptr_sval->get_kind ())
    {
    default:
      gcc_unreachable ();

    case SK_REGION:
      {
	/* A pointer to a known region: dereference directly.  */
	const region_svalue *region_sval
	  = as_a <const region_svalue *> (ptr_sval);
	return region_sval->get_pointee ();
      }

    case SK_BINOP:
      {
	const binop_svalue *binop_sval
	  = as_a <const binop_svalue *> (ptr_sval);
	switch (binop_sval->get_op ())
	  {
	  case POINTER_PLUS_EXPR:
	    {
	      /* If we have a symbolic value expressing pointer arithmetic,
		 try to convert it to a suitable region.  */
	      const region *parent_region
		= deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
	      const svalue *offset = binop_sval->get_arg1 ();
	      tree type = TREE_TYPE (ptr_sval->get_type ());
	      return m_mgr->get_offset_region (parent_region, type, offset);
	    }
	  default:
	    goto create_symbolic_region;
	  }
      }

    /* Kinds for which we can't determine a concrete pointee; fall
       back to a symbolic region keyed on the pointer value.  */
    case SK_CONSTANT:
    case SK_INITIAL:
    case SK_UNARYOP:
    case SK_SUB:
    case SK_WIDENING:
    case SK_CONJURED:
      goto create_symbolic_region;

    case SK_POISONED:
      {
	/* Dereferencing a poisoned value (e.g. uninitialized or freed):
	   warn if we can find a tree to describe the pointer.  */
	if (ctxt)
	  {
	    tree ptr = get_representative_tree (ptr_sval);
	    /* If we can't get a representative tree for PTR_SVAL
	       (e.g. if it hasn't been bound into the store), then
	       fall back on PTR_TREE, if non-NULL.  */
	    if (!ptr)
	      ptr = ptr_tree;
	    if (ptr)
	      {
		const poisoned_svalue *poisoned_sval
		  = as_a <const poisoned_svalue *> (ptr_sval);
		enum poison_kind pkind = poisoned_sval->get_poison_kind ();
		ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
	      }
	  }
	goto create_symbolic_region;
      }

    case SK_UNKNOWN:
      {
      create_symbolic_region:
	return m_mgr->get_symbolic_region (ptr_sval);
      }

    case SK_SETJMP:
      goto create_symbolic_region;
    }

  gcc_unreachable ();
}
1377
808f4dfe
DM
/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context */*ctxt*/)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  /* Record the binding in the store as a direct (non-default) binding.  */
  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     BK_direct);
}
1391
/* Set the value of the region given by LHS to the value given by RHS.  */

void
region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
{
  /* Evaluate both sides as region/value, then delegate to the
     region/svalue overload.  */
  const region *lhs_reg = get_lvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);
  set_value (lhs_reg, rhs_sval, ctxt);
}
1403
/* Remove all bindings overlapping REG within the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}
1411
/* Remove any bindings for REG within the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}
1419
/* Zero-fill REG.  */

void
region_model::zero_fill_region (const region *reg)
{
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}
1427
/* Mark REG as having unknown content.  */

void
region_model::mark_region_as_unknown (const region *reg)
{
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg);
}
1435
808f4dfe 1436/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
1437 this model. */
1438
1439tristate
808f4dfe
DM
1440region_model::eval_condition (const svalue *lhs,
1441 enum tree_code op,
1442 const svalue *rhs) const
757bf1df 1443{
e978955d
DM
1444 /* For now, make no attempt to capture constraints on floating-point
1445 values. */
1446 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
1447 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
1448 return tristate::unknown ();
1449
808f4dfe 1450 tristate ts = eval_condition_without_cm (lhs, op, rhs);
757bf1df
DM
1451 if (ts.is_known ())
1452 return ts;
1453
1454 /* Otherwise, try constraints. */
808f4dfe 1455 return m_constraints->eval_condition (lhs, op, rhs);
757bf1df
DM
1456}
1457
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model, without resorting to the constraint_manager.

   This is exposed so that impl_region_model_context::on_state_leak can
   check for equality part-way through region_model::purge_unused_svalues
   without risking creating new ECs.  */

tristate
region_model::eval_condition_without_cm (const svalue *lhs,
					 enum tree_code op,
					 const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* See what we know based on the values.  */

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  /* If we have a pair of constants, compare them.  */
  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);

  /* Handle comparison of a region_svalue against zero.  */

  if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      if (zerop (cst_rhs->get_constant ()))
	{
	  /* A region_svalue is a non-NULL pointer, except in certain
	     special cases (see the comment for region::non_null_p).  */
	  const region *pointee = ptr->get_pointee ();
	  if (pointee->non_null_p ())
	    {
	      switch (op)
		{
		default:
		  gcc_unreachable ();

		case EQ_EXPR:
		case GE_EXPR:
		case LE_EXPR:
		  return tristate::TS_FALSE;

		case NE_EXPR:
		case GT_EXPR:
		case LT_EXPR:
		  return tristate::TS_TRUE;
		}
	    }
	}

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* A widened value (from loop iteration) compared against a constant:
     delegate to the widening_svalue's own logic.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  return tristate::TS_UNKNOWN;
}
1584
1585/* Subroutine of region_model::eval_condition_without_cm, for rejecting
1586 equality of INIT_VAL(PARM) with &LOCAL. */
1587
1588tristate
1589region_model::compare_initial_and_pointer (const initial_svalue *init,
1590 const region_svalue *ptr) const
1591{
1592 const region *pointee = ptr->get_pointee ();
1593
1594 /* If we have a pointer to something within a stack frame, it can't be the
1595 initial value of a param. */
1596 if (pointee->maybe_get_frame_region ())
1597 {
1598 const region *reg = init->get_region ();
1599 if (tree reg_decl = reg->maybe_get_decl ())
1600 if (TREE_CODE (reg_decl) == SSA_NAME)
1601 {
1602 tree ssa_name = reg_decl;
1603 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
1604 && SSA_NAME_VAR (ssa_name)
1605 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
1606 return tristate::TS_FALSE;
1607 }
757bf1df
DM
1608 }
1609
1610 return tristate::TS_UNKNOWN;
1611}
1612
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  tristate t_cond = eval_condition (lhs_sval, op, rhs_sval);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  /* Store the constraint.  */
  m_constraints->add_constraint (lhs_sval, op, rhs_sval);

  /* Derive any further constraints implied by LHS's defining
     statement (handles optimized && and ||).  */
  add_any_constraints_from_ssa_def_stmt (lhs, op, rhs, ctxt);

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  return true;
}
1655
/* Subroutine of region_model::add_constraint for handling optimized
   && and || conditionals.

   If we have an SSA_NAME for a boolean compared against 0,
   look at anything implied by the def stmt and call add_constraint
   for it (which could recurse).

   For example, if we have
      _1 = p_6 == 0B;
      _2 = p_8 == 0B
      _3 = _1 | _2
    and add the constraint
      (_3 == 0),
    then the def stmt for _3 implies that _1 and _2 are both false,
    and hence we can add the constraints:
      p_6 != 0B
      p_8 != 0B.  */

void
region_model::add_any_constraints_from_ssa_def_stmt (tree lhs,
						     enum tree_code op,
						     tree rhs,
						     region_model_context *ctxt)
{
  /* Only SSA names have def stmts to inspect.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    return;

  /* Only handle comparisons against zero...  */
  if (!zerop (rhs))
    return;

  /* ...and only equality/inequality (i.e. "is true"/"is false").  */
  if (op != NE_EXPR && op != EQ_EXPR)
    return;

  /* Dispatch on the kind of defining statement.  */
  gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
  if (const gassign *assign = dyn_cast<gassign *> (def_stmt))
    add_any_constraints_from_gassign (op, rhs, assign, ctxt);
  else if (gcall *call = dyn_cast<gcall *> (def_stmt))
    add_any_constraints_from_gcall (op, rhs, call, ctxt);
}
1695
/* Add any constraints for an SSA_NAME defined by ASSIGN
   where the result OP RHS.  */

void
region_model::add_any_constraints_from_gassign (enum tree_code op,
						tree rhs,
						const gassign *assign,
						region_model_context *ctxt)
{
  /* We have either
     - "LHS != false" (i.e. LHS is true), or
     - "LHS == false" (i.e. LHS is false).  */
  bool is_true = op == NE_EXPR;

  enum tree_code rhs_code = gimple_assign_rhs_code (assign);

  switch (rhs_code)
    {
    default:
      /* Don't attempt to infer anything for other rhs codes.  */
      break;

    case NOP_EXPR:
      {
	/* A conversion: propagate the condition to the operand.  */
	add_constraint (gimple_assign_rhs1 (assign), op, rhs, ctxt);
      }
      break;

    case BIT_AND_EXPR:
      {
	if (is_true)
	  {
	    /* ...and "LHS == (rhs1 & rhs2) i.e. "(rhs1 & rhs2)" is true
	       then both rhs1 and rhs2 must be true.  */
	    tree rhs1 = gimple_assign_rhs1 (assign);
	    tree rhs2 = gimple_assign_rhs2 (assign);
	    add_constraint (rhs1, NE_EXPR, boolean_false_node, ctxt);
	    add_constraint (rhs2, NE_EXPR, boolean_false_node, ctxt);
	  }
      }
      break;

    case BIT_IOR_EXPR:
      {
	if (!is_true)
	  {
	    /* ...and "LHS == (rhs1 | rhs2)
	       i.e. "(rhs1 | rhs2)" is false
	       then both rhs1 and rhs2 must be false.  */
	    tree rhs1 = gimple_assign_rhs1 (assign);
	    tree rhs2 = gimple_assign_rhs2 (assign);
	    add_constraint (rhs1, EQ_EXPR, boolean_false_node, ctxt);
	    add_constraint (rhs2, EQ_EXPR, boolean_false_node, ctxt);
	  }
      }
      break;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "LHS == (rhs1 OP rhs2)"
	   then rhs1 OP rhs2 must have the same logical value as LHS.  */
	tree rhs1 = gimple_assign_rhs1 (assign);
	tree rhs2 = gimple_assign_rhs2 (assign);
	if (!is_true)
	  /* LHS is false, so the comparison's negation holds.  */
	  rhs_code
	    = invert_tree_comparison (rhs_code, false /* honor_nans */);
	add_constraint (rhs1, rhs_code, rhs2, ctxt);
      }
      break;
    }
}
1767
e516294a
DM
1768/* Add any constraints for an SSA_NAME defined by CALL
1769 where the result OP RHS. */
1770
1771void
1772region_model::add_any_constraints_from_gcall (enum tree_code op,
1773 tree rhs,
1774 const gcall *call,
1775 region_model_context *ctxt)
1776{
1777 if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
1778 || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
1779 || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
1780 {
1781 /* __builtin_expect's return value is its initial argument. */
1782 add_constraint (gimple_call_arg (call, 0), op, rhs, ctxt);
1783 }
1784}
1785
757bf1df
DM
1786/* Determine what is known about the condition "LHS OP RHS" within
1787 this model.
1788 Use CTXT for reporting any diagnostics associated with the accesses. */
1789
1790tristate
1791region_model::eval_condition (tree lhs,
1792 enum tree_code op,
1793 tree rhs,
1794 region_model_context *ctxt)
1795{
e978955d
DM
1796 /* For now, make no attempt to model constraints on floating-point
1797 values. */
1798 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
1799 return tristate::unknown ();
1800
757bf1df
DM
1801 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
1802}
1803
808f4dfe
DM
/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.  */

path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited) const
{
  if (sval == NULL)
    return path_var (NULL_TREE, 0);

  /* Look through casts to the underlying value.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    sval = cast_sval;

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    return path_var (NULL_TREE, 0);
  visited->add (sval);

  /* Gather candidate path_vars from the store's bindings.  */
  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, &pvs);

  /* A constant is trivially representable by its own tree.  */
  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      /* Represent a pointer as "&PV" for the pointee's path_var.  */
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited))
	return path_var (build1 (ADDR_EXPR,
				 TREE_TYPE (sval->get_type ()),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      /* Represent a field read as "PARENT.FIELD".  */
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Prefer the most human-readable candidate.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
1862
808f4dfe 1863/* Attempt to return a tree that represents SVAL, or return NULL_TREE. */
757bf1df 1864
808f4dfe
DM
1865tree
1866region_model::get_representative_tree (const svalue *sval) const
757bf1df 1867{
808f4dfe
DM
1868 svalue_set visited;
1869 return get_representative_path_var (sval, &visited).m_tree;
1870}
1871
/* Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var (const region *reg,
					   svalue_set *visited) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	/* A function is represented by its FUNCTION_DECL.  */
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      gcc_unreachable (); // TODO

    case RK_SYMBOLIC:
      {
	/* "*PTR": build a MEM_REF with a zero offset.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	/* A decl is represented by its own tree, at its frame's depth.  */
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* "PARENT.FIELD".  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* "PARENT[INDEX]".  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	/* "*(PARENT + OFFSET)" as a MEM_REF.  */
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited);
	if (!offset_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 parent_pv.m_tree, offset_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_CAST:
      {
	/* "(TYPE)PARENT" as a NOP_EXPR.  */
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	/* A string literal is represented by its STRING_CST.  */
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_UNKNOWN:
      return path_var (NULL_TREE, 0);
    }
}
2006
2007/* Update this model for any phis in SNODE, assuming we came from
2008 LAST_CFG_SUPEREDGE. */
2009
2010void
2011region_model::update_for_phis (const supernode *snode,
2012 const cfg_superedge *last_cfg_superedge,
2013 region_model_context *ctxt)
2014{
2015 gcc_assert (last_cfg_superedge);
2016
2017 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
2018 !gsi_end_p (gpi); gsi_next (&gpi))
2019 {
2020 gphi *phi = gpi.phi ();
2021
2022 tree src = last_cfg_superedge->get_phi_arg (phi);
2023 tree lhs = gimple_phi_result (phi);
2024
2025 /* Update next_state based on phi. */
808f4dfe 2026 handle_phi (phi, lhs, src, ctxt);
757bf1df
DM
2027 }
2028}
2029
2030/* Attempt to update this model for taking EDGE (where the last statement
2031 was LAST_STMT), returning true if the edge can be taken, false
2032 otherwise.
2033
2034 For CFG superedges where LAST_STMT is a conditional or a switch
2035 statement, attempt to add the relevant conditions for EDGE to this
2036 model, returning true if they are feasible, or false if they are
2037 impossible.
2038
2039 For call superedges, push frame information and store arguments
2040 into parameters.
2041
2042 For return superedges, pop frame information and store return
2043 values into any lhs.
2044
2045 Rejection of call/return superedges happens elsewhere, in
2046 program_point::on_edge (i.e. based on program point, rather
2047 than program state). */
2048
2049bool
2050region_model::maybe_update_for_edge (const superedge &edge,
2051 const gimple *last_stmt,
2052 region_model_context *ctxt)
2053{
2054 /* Handle frame updates for interprocedural edges. */
2055 switch (edge.m_kind)
2056 {
2057 default:
2058 break;
2059
2060 case SUPEREDGE_CALL:
2061 {
2062 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
2063 update_for_call_superedge (*call_edge, ctxt);
2064 }
2065 break;
2066
2067 case SUPEREDGE_RETURN:
2068 {
2069 const return_superedge *return_edge
2070 = as_a <const return_superedge *> (&edge);
2071 update_for_return_superedge (*return_edge, ctxt);
2072 }
2073 break;
2074
2075 case SUPEREDGE_INTRAPROCEDURAL_CALL:
2076 {
2077 const callgraph_superedge *cg_sedge
2078 = as_a <const callgraph_superedge *> (&edge);
2079 update_for_call_summary (*cg_sedge, ctxt);
2080 }
2081 break;
2082 }
2083
2084 if (last_stmt == NULL)
2085 return true;
2086
2087 /* Apply any constraints for conditionals/switch statements. */
2088
2089 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
2090 {
2091 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
2092 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt);
2093 }
2094
2095 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
2096 {
2097 const switch_cfg_superedge *switch_sedge
2098 = as_a <const switch_cfg_superedge *> (&edge);
2099 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, ctxt);
2100 }
2101
2102 return true;
2103}
2104
2105/* Push a new frame_region on to the stack region.
2106 Populate the frame_region with child regions for the function call's
2107 parameters, using values from the arguments at the callsite in the
2108 caller's frame. */
2109
2110void
2111region_model::update_for_call_superedge (const call_superedge &call_edge,
2112 region_model_context *ctxt)
2113{
808f4dfe 2114 /* Build a vec of argument svalues, using the current top
757bf1df
DM
2115 frame for resolving tree expressions. */
2116 const gcall *call_stmt = call_edge.get_call_stmt ();
808f4dfe 2117 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
2118
2119 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
2120 {
2121 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 2122 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
2123 }
2124
808f4dfe 2125 push_frame (call_edge.get_callee_function (), &arg_svals, ctxt);
757bf1df
DM
2126}
2127
a96f1c38
DM
2128/* Pop the top-most frame_region from the stack, and copy the return
2129 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 2130 the call (if any). */
757bf1df
DM
2131void
2132region_model::update_for_return_superedge (const return_superedge &return_edge,
2133 region_model_context *ctxt)
2134{
a96f1c38 2135 /* Get the region for the result of the call, within the caller frame. */
808f4dfe 2136 const region *result_dst_reg = NULL;
757bf1df
DM
2137 const gcall *call_stmt = return_edge.get_call_stmt ();
2138 tree lhs = gimple_call_lhs (call_stmt);
2139 if (lhs)
a96f1c38
DM
2140 {
2141 /* Normally we access the top-level frame, which is:
808f4dfe 2142 path_var (expr, get_stack_depth () - 1)
a96f1c38 2143 whereas here we need the caller frame, hence "- 2" here. */
808f4dfe
DM
2144 gcc_assert (get_stack_depth () >= 2);
2145 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
a96f1c38
DM
2146 ctxt);
2147 }
2148
808f4dfe 2149 pop_frame (result_dst_reg, NULL, ctxt);
757bf1df
DM
2150}
2151
2152/* Update this region_model with a summary of the effect of calling
2153 and returning from CG_SEDGE.
2154
2155 TODO: Currently this is extremely simplistic: we merely set the
2156 return value to "unknown". A proper implementation would e.g. update
2157 sm-state, and presumably be reworked to support multiple outcomes. */
2158
2159void
2160region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
2161 region_model_context *ctxt)
2162{
2163 /* For now, set any return value to "unknown". */
2164 const gcall *call_stmt = cg_sedge.get_call_stmt ();
2165 tree lhs = gimple_call_lhs (call_stmt);
2166 if (lhs)
808f4dfe 2167 mark_region_as_unknown (get_lvalue (lhs, ctxt));
757bf1df
DM
2168
2169 // TODO: actually implement some kind of summary here
2170}
2171
2172/* Given a true or false edge guarded by conditional statement COND_STMT,
2173 determine appropriate constraints for the edge to be taken.
2174
2175 If they are feasible, add the constraints and return true.
2176
2177 Return false if the constraints contradict existing knowledge
2178 (and so the edge should not be taken). */
2179
2180bool
2181region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
2182 const gcond *cond_stmt,
2183 region_model_context *ctxt)
2184{
2185 ::edge cfg_edge = sedge.get_cfg_edge ();
2186 gcc_assert (cfg_edge != NULL);
2187 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
2188
2189 enum tree_code op = gimple_cond_code (cond_stmt);
2190 tree lhs = gimple_cond_lhs (cond_stmt);
2191 tree rhs = gimple_cond_rhs (cond_stmt);
2192 if (cfg_edge->flags & EDGE_FALSE_VALUE)
2193 op = invert_tree_comparison (op, false /* honor_nans */);
2194 return add_constraint (lhs, op, rhs, ctxt);
2195}
2196
2197/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
2198 for the edge to be taken.
2199
2200 If they are feasible, add the constraints and return true.
2201
2202 Return false if the constraints contradict existing knowledge
2203 (and so the edge should not be taken). */
2204
2205bool
2206region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
2207 const gswitch *switch_stmt,
2208 region_model_context *ctxt)
2209{
2210 tree index = gimple_switch_index (switch_stmt);
2211 tree case_label = edge.get_case_label ();
2212 gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR);
2213 tree lower_bound = CASE_LOW (case_label);
2214 tree upper_bound = CASE_HIGH (case_label);
2215 if (lower_bound)
2216 {
2217 if (upper_bound)
2218 {
2219 /* Range. */
2220 if (!add_constraint (index, GE_EXPR, lower_bound, ctxt))
2221 return false;
2222 return add_constraint (index, LE_EXPR, upper_bound, ctxt);
2223 }
2224 else
2225 /* Single-value. */
2226 return add_constraint (index, EQ_EXPR, lower_bound, ctxt);
2227 }
2228 else
2229 {
2230 /* The default case.
2231 Add exclusions based on the other cases. */
2232 for (unsigned other_idx = 1;
2233 other_idx < gimple_switch_num_labels (switch_stmt);
2234 other_idx++)
2235 {
2236 tree other_label = gimple_switch_label (switch_stmt,
2237 other_idx);
2238 tree other_lower_bound = CASE_LOW (other_label);
2239 tree other_upper_bound = CASE_HIGH (other_label);
2240 gcc_assert (other_lower_bound);
2241 if (other_upper_bound)
2242 {
2243 /* Exclude this range-valued case.
2244 For now, we just exclude the boundary values.
2245 TODO: exclude the values within the region. */
2246 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
2247 return false;
2248 if (!add_constraint (index, NE_EXPR, other_upper_bound, ctxt))
2249 return false;
2250 }
2251 else
2252 /* Exclude this single-valued case. */
2253 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
2254 return false;
2255 }
2256 return true;
2257 }
2258}
2259
808f4dfe
DM
2260/* For use with push_frame when handling a top-level call within the analysis.
2261 PARAM has a defined but unknown initial value.
2262 Anything it points to has escaped, since the calling context "knows"
2263 the pointer, and thus calls to unknown functions could read/write into
2264 the region. */
757bf1df
DM
2265
2266void
808f4dfe 2267region_model::on_top_level_param (tree param,
3a25f345 2268 region_model_context *ctxt)
757bf1df 2269{
808f4dfe 2270 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 2271 {
808f4dfe
DM
2272 const region *param_reg = get_lvalue (param, ctxt);
2273 const svalue *init_ptr_sval
2274 = m_mgr->get_or_create_initial_value (param_reg);
2275 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
2276 m_store.mark_as_escaped (pointee_reg);
5eae0ac7 2277 }
757bf1df
DM
2278}
2279
808f4dfe
DM
2280/* Update this region_model to reflect pushing a frame onto the stack
2281 for a call to FUN.
757bf1df 2282
808f4dfe
DM
2283 If ARG_SVALS is non-NULL, use it to populate the parameters
2284 in the new frame.
2285 Otherwise, the params have their initial_svalues.
757bf1df 2286
808f4dfe 2287 Return the frame_region for the new frame. */
757bf1df 2288
808f4dfe
DM
2289const region *
2290region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
2291 region_model_context *ctxt)
757bf1df 2292{
808f4dfe
DM
2293 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
2294 if (arg_svals)
757bf1df 2295 {
808f4dfe
DM
2296 /* Arguments supplied from a caller frame. */
2297 tree fndecl = fun->decl;
2298 unsigned idx = 0;
2299 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2300 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 2301 {
808f4dfe
DM
2302 /* If there's a mismatching declaration, the call stmt might
2303 not have enough args. Handle this case by leaving the
2304 rest of the params as uninitialized. */
2305 if (idx >= arg_svals->length ())
2306 break;
2307 const svalue *arg_sval = (*arg_svals)[idx];
2308 const region *parm_reg = get_lvalue (iter_parm, ctxt);
2309 set_value (parm_reg, arg_sval, ctxt);
757bf1df 2310
808f4dfe
DM
2311 /* Also do it for default SSA name (sharing the same value). */
2312 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2313 if (parm_default_ssa)
2314 {
2315 const region *defssa_reg = get_lvalue (parm_default_ssa, ctxt);
2316 set_value (defssa_reg, arg_sval, ctxt);
2317 }
757bf1df 2318 }
757bf1df 2319 }
808f4dfe 2320 else
757bf1df 2321 {
808f4dfe
DM
2322 /* Otherwise we have a top-level call within the analysis. The params
2323 have defined but unknown initial values.
2324 Anything they point to has escaped. */
2325 tree fndecl = fun->decl;
2326 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2327 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 2328 {
808f4dfe
DM
2329 on_top_level_param (iter_parm, ctxt);
2330 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2331 if (parm_default_ssa)
2332 on_top_level_param (parm_default_ssa, ctxt);
757bf1df
DM
2333 }
2334 }
757bf1df 2335
808f4dfe 2336 return m_current_frame;
757bf1df
DM
2337}
2338
808f4dfe
DM
2339/* Get the function of the top-most frame in this region_model's stack.
2340 There must be such a frame. */
757bf1df 2341
808f4dfe
DM
2342function *
2343region_model::get_current_function () const
757bf1df 2344{
808f4dfe
DM
2345 const frame_region *frame = get_current_frame ();
2346 gcc_assert (frame);
2347 return frame->get_function ();
757bf1df
DM
2348}
2349
808f4dfe 2350/* Pop the topmost frame_region from this region_model's stack;
757bf1df 2351
808f4dfe
DM
2352 If RESULT_DST_REG is non-null, copy any return value from the frame
2353 into RESULT_DST_REG's region.
2354 If OUT_RESULT is non-null, copy any return value from the frame
2355 into *OUT_RESULT.
757bf1df 2356
808f4dfe
DM
2357 Purge the frame region and all its descendent regions.
2358 Convert any pointers that point into such regions into
2359 POISON_KIND_POPPED_STACK svalues. */
757bf1df 2360
808f4dfe
DM
2361void
2362region_model::pop_frame (const region *result_dst_reg,
2363 const svalue **out_result,
2364 region_model_context *ctxt)
2365{
2366 gcc_assert (m_current_frame);
757bf1df 2367
808f4dfe
DM
2368 /* Evaluate the result, within the callee frame. */
2369 const frame_region *frame_reg = m_current_frame;
2370 tree fndecl = m_current_frame->get_function ()->decl;
2371 tree result = DECL_RESULT (fndecl);
2372 if (result && TREE_TYPE (result) != void_type_node)
2373 {
2374 if (result_dst_reg)
2375 {
2376 /* Copy the result to RESULT_DST_REG. */
2377 copy_region (result_dst_reg,
2378 get_lvalue (result, ctxt),
2379 ctxt);
2380 }
2381 if (out_result)
2382 *out_result = get_rvalue (result, ctxt);
2383 }
757bf1df 2384
808f4dfe
DM
2385 /* Pop the frame. */
2386 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 2387
808f4dfe 2388 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
2389}
2390
808f4dfe 2391/* Get the number of frames in this region_model's stack. */
757bf1df 2392
808f4dfe
DM
2393int
2394region_model::get_stack_depth () const
757bf1df 2395{
808f4dfe
DM
2396 const frame_region *frame = get_current_frame ();
2397 if (frame)
2398 return frame->get_stack_depth ();
2399 else
2400 return 0;
757bf1df
DM
2401}
2402
808f4dfe
DM
2403/* Get the frame_region with the given index within the stack.
2404 The frame_region must exist. */
757bf1df 2405
808f4dfe
DM
2406const frame_region *
2407region_model::get_frame_at_index (int index) const
757bf1df 2408{
808f4dfe
DM
2409 const frame_region *frame = get_current_frame ();
2410 gcc_assert (frame);
2411 gcc_assert (index >= 0);
2412 gcc_assert (index <= frame->get_index ());
2413 while (index != frame->get_index ())
2414 {
2415 frame = frame->get_calling_frame ();
2416 gcc_assert (frame);
2417 }
2418 return frame;
757bf1df
DM
2419}
2420
808f4dfe
DM
2421/* Unbind svalues for any regions in REG and below.
2422 Find any pointers to such regions; convert them to
2423 poisoned values of kind PKIND. */
757bf1df 2424
808f4dfe
DM
2425void
2426region_model::unbind_region_and_descendents (const region *reg,
2427 enum poison_kind pkind)
757bf1df 2428{
808f4dfe
DM
2429 /* Gather a set of base regions to be unbound. */
2430 hash_set<const region *> base_regs;
2431 for (store::cluster_map_t::iterator iter = m_store.begin ();
2432 iter != m_store.end (); ++iter)
757bf1df 2433 {
808f4dfe
DM
2434 const region *iter_base_reg = (*iter).first;
2435 if (iter_base_reg->descendent_of_p (reg))
2436 base_regs.add (iter_base_reg);
757bf1df 2437 }
808f4dfe
DM
2438 for (hash_set<const region *>::iterator iter = base_regs.begin ();
2439 iter != base_regs.end (); ++iter)
2440 m_store.purge_cluster (*iter);
757bf1df 2441
808f4dfe
DM
2442 /* Find any pointers to REG or its descendents; convert to poisoned. */
2443 poison_any_pointers_to_descendents (reg, pkind);
757bf1df
DM
2444}
2445
808f4dfe
DM
2446/* Implementation of BindingVisitor.
2447 Update the bound svalues for regions below REG to use poisoned
2448 values instead. */
757bf1df 2449
808f4dfe 2450struct bad_pointer_finder
757bf1df 2451{
808f4dfe
DM
2452 bad_pointer_finder (const region *reg, enum poison_kind pkind,
2453 region_model_manager *mgr)
2454 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
2455 {}
757bf1df 2456
808f4dfe
DM
2457 void on_binding (const binding_key *, const svalue *&sval)
2458 {
2459 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
2460 {
2461 const region *ptr_dst = ptr_sval->get_pointee ();
2462 /* Poison ptrs to descendents of REG, but not to REG itself,
2463 otherwise double-free detection doesn't work (since sm-state
2464 for "free" is stored on the original ptr svalue). */
2465 if (ptr_dst->descendent_of_p (m_reg)
2466 && ptr_dst != m_reg)
2467 {
2468 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
2469 sval->get_type ());
2470 ++m_count;
2471 }
2472 }
2473 }
757bf1df 2474
808f4dfe
DM
2475 const region *m_reg;
2476 enum poison_kind m_pkind;
2477 region_model_manager *const m_mgr;
2478 int m_count;
2479};
757bf1df 2480
808f4dfe
DM
2481/* Find any pointers to REG or its descendents; convert them to
2482 poisoned values of kind PKIND.
2483 Return the number of pointers that were poisoned. */
757bf1df 2484
808f4dfe
DM
2485int
2486region_model::poison_any_pointers_to_descendents (const region *reg,
2487 enum poison_kind pkind)
2488{
2489 bad_pointer_finder bv (reg, pkind, m_mgr);
2490 m_store.for_each_binding (bv);
2491 return bv.m_count;
757bf1df
DM
2492}
2493
808f4dfe
DM
2494/* Attempt to merge THIS with OTHER_MODEL, writing the result
2495 to OUT_MODEL. Use POINT to distinguish values created as a
2496 result of merging. */
757bf1df 2497
808f4dfe
DM
2498bool
2499region_model::can_merge_with_p (const region_model &other_model,
2500 const program_point &point,
2501 region_model *out_model) const
757bf1df 2502{
808f4dfe
DM
2503 gcc_assert (out_model);
2504 gcc_assert (m_mgr == other_model.m_mgr);
2505 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 2506
808f4dfe
DM
2507 if (m_current_frame != other_model.m_current_frame)
2508 return false;
2509 out_model->m_current_frame = m_current_frame;
757bf1df 2510
808f4dfe 2511 model_merger m (this, &other_model, point, out_model);
757bf1df 2512
808f4dfe
DM
2513 if (!store::can_merge_p (&m_store, &other_model.m_store,
2514 &out_model->m_store, m_mgr->get_store_manager (),
2515 &m))
2516 return false;
2517
2518 /* Merge constraints. */
2519 constraint_manager::merge (*m_constraints,
2520 *other_model.m_constraints,
2521 out_model->m_constraints,
2522 m);
757bf1df 2523
808f4dfe 2524 return true;
757bf1df
DM
2525}
2526
2527/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
2528 otherwise. */
2529
2530tree
2531region_model::get_fndecl_for_call (const gcall *call,
2532 region_model_context *ctxt)
2533{
2534 tree fn_ptr = gimple_call_fn (call);
2535 if (fn_ptr == NULL_TREE)
2536 return NULL_TREE;
808f4dfe
DM
2537 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
2538 if (const region_svalue *fn_ptr_ptr
2539 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 2540 {
808f4dfe
DM
2541 const region *reg = fn_ptr_ptr->get_pointee ();
2542 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 2543 {
808f4dfe 2544 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
2545 cgraph_node *node = cgraph_node::get (fn_decl);
2546 if (!node)
2547 return NULL_TREE;
2548 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
2549 if (ultimate_node)
2550 return ultimate_node->decl;
757bf1df
DM
2551 }
2552 }
2553
2554 return NULL_TREE;
2555}
2556
808f4dfe 2557/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 2558
808f4dfe 2559struct append_ssa_names_cb_data
757bf1df 2560{
808f4dfe
DM
2561 const region_model *model;
2562 auto_vec<const decl_region *> *out;
2563};
757bf1df 2564
808f4dfe
DM
2565/* Populate *OUT with all decl_regions for SSA names in the current
2566 frame that have clusters within the store. */
757bf1df
DM
2567
2568void
808f4dfe
DM
2569region_model::
2570get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
2571 const
757bf1df 2572{
808f4dfe
DM
2573 append_ssa_names_cb_data data;
2574 data.model = this;
2575 data.out = out;
2576 m_store.for_each_cluster (append_ssa_names_cb, &data);
757bf1df
DM
2577}
2578
808f4dfe 2579/* Implementation detail of get_ssa_name_regions_for_current_frame. */
757bf1df 2580
808f4dfe
DM
2581void
2582region_model::append_ssa_names_cb (const region *base_reg,
2583 append_ssa_names_cb_data *cb_data)
757bf1df 2584{
808f4dfe
DM
2585 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
2586 return;
2587 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
757bf1df 2588 {
808f4dfe
DM
2589 if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
2590 cb_data->out->safe_push (decl_reg);
757bf1df
DM
2591 }
2592}
2593
808f4dfe 2594/* Return a new region describing a heap-allocated block of memory. */
757bf1df 2595
808f4dfe
DM
2596const region *
2597region_model::create_region_for_heap_alloc (const svalue *size_in_bytes)
757bf1df 2598{
808f4dfe
DM
2599 const region *reg = m_mgr->create_region_for_heap_alloc ();
2600 record_dynamic_extents (reg, size_in_bytes);
2601 return reg;
757bf1df
DM
2602}
2603
808f4dfe
DM
2604/* Return a new region describing a block of memory allocated within the
2605 current frame. */
757bf1df 2606
808f4dfe
DM
2607const region *
2608region_model::create_region_for_alloca (const svalue *size_in_bytes)
757bf1df 2609{
808f4dfe
DM
2610 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
2611 record_dynamic_extents (reg, size_in_bytes);
2612 return reg;
757bf1df
DM
2613}
2614
808f4dfe
DM
2615/* Placeholder hook for recording that the size of REG is SIZE_IN_BYTES.
2616 Currently does nothing. */
757bf1df
DM
2617
2618void
808f4dfe
DM
2619region_model::
2620record_dynamic_extents (const region *reg ATTRIBUTE_UNUSED,
2621 const svalue *size_in_bytes ATTRIBUTE_UNUSED)
757bf1df
DM
2622{
2623}
2624
808f4dfe 2625/* struct model_merger. */
757bf1df 2626
808f4dfe 2627/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
2628
2629void
808f4dfe 2630model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 2631{
808f4dfe
DM
2632 pp_string (pp, "model A:");
2633 pp_newline (pp);
2634 m_model_a->dump_to_pp (pp, simple, true);
2635 pp_newline (pp);
757bf1df 2636
808f4dfe 2637 pp_string (pp, "model B:");
757bf1df 2638 pp_newline (pp);
808f4dfe 2639 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
2640 pp_newline (pp);
2641
808f4dfe 2642 pp_string (pp, "merged model:");
757bf1df 2643 pp_newline (pp);
808f4dfe 2644 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
2645 pp_newline (pp);
2646}
2647
808f4dfe 2648/* Dump a multiline representation of this merger to FILE. */
757bf1df
DM
2649
2650void
808f4dfe 2651model_merger::dump (FILE *fp, bool simple) const
757bf1df
DM
2652{
2653 pretty_printer pp;
2654 pp_format_decoder (&pp) = default_tree_printer;
2655 pp_show_color (&pp) = pp_show_color (global_dc->printer);
2656 pp.buffer->stream = fp;
808f4dfe 2657 dump_to_pp (&pp, simple);
757bf1df
DM
2658 pp_flush (&pp);
2659}
2660
808f4dfe 2661/* Dump a multiline representation of this merger to stderr. */
757bf1df
DM
2662
2663DEBUG_FUNCTION void
808f4dfe 2664model_merger::dump (bool simple) const
757bf1df 2665{
808f4dfe 2666 dump (stderr, simple);
757bf1df
DM
2667}
2668
75038aa6
DM
2669} // namespace ana
2670
808f4dfe 2671/* Dump RMODEL fully to stderr (i.e. without summarization). */
757bf1df 2672
808f4dfe
DM
2673DEBUG_FUNCTION void
2674debug (const region_model &rmodel)
757bf1df 2675{
808f4dfe 2676 rmodel.dump (false);
757bf1df
DM
2677}
2678
808f4dfe 2679/* class engine. */
757bf1df 2680
808f4dfe 2681/* Dump the managed objects by class to LOGGER, and the per-class totals. */
757bf1df 2682
808f4dfe
DM
2683void
2684engine::log_stats (logger *logger) const
757bf1df 2685{
808f4dfe 2686 m_mgr.log_stats (logger, true);
757bf1df
DM
2687}
2688
75038aa6
DM
2689namespace ana {
2690
757bf1df
DM
2691#if CHECKING_P
2692
2693namespace selftest {
2694
8c08c983
DM
2695/* Build a constant tree of the given type from STR. */
2696
2697static tree
2698build_real_cst_from_string (tree type, const char *str)
2699{
2700 REAL_VALUE_TYPE real;
2701 real_from_string (&real, str);
2702 return build_real (type, real);
2703}
2704
2705/* Append various "interesting" constants to OUT (e.g. NaN). */
2706
2707static void
2708append_interesting_constants (auto_vec<tree> *out)
2709{
2710 out->safe_push (build_int_cst (integer_type_node, 0));
2711 out->safe_push (build_int_cst (integer_type_node, 42));
2712 out->safe_push (build_int_cst (unsigned_type_node, 0));
2713 out->safe_push (build_int_cst (unsigned_type_node, 42));
2714 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
2715 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
2716 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
2717 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
2718 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
2719 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
2720 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
2721 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
2722}
2723
2724/* Verify that tree_cmp is a well-behaved comparator for qsort, even
2725 if the underlying constants aren't comparable. */
2726
2727static void
2728test_tree_cmp_on_constants ()
2729{
2730 auto_vec<tree> csts;
2731 append_interesting_constants (&csts);
2732
2733 /* Try sorting every triple. */
2734 const unsigned num = csts.length ();
2735 for (unsigned i = 0; i < num; i++)
2736 for (unsigned j = 0; j < num; j++)
2737 for (unsigned k = 0; k < num; k++)
2738 {
2739 auto_vec<tree> v (3);
2740 v.quick_push (csts[i]);
2741 v.quick_push (csts[j]);
2742 v.quick_push (csts[k]);
2743 v.qsort (tree_cmp);
2744 }
2745}
2746
757bf1df
DM
2747/* Implementation detail of the ASSERT_CONDITION_* macros. */
2748
808f4dfe
DM
2749void
2750assert_condition (const location &loc,
2751 region_model &model,
2752 const svalue *lhs, tree_code op, const svalue *rhs,
2753 tristate expected)
2754{
2755 tristate actual = model.eval_condition (lhs, op, rhs);
2756 ASSERT_EQ_AT (loc, actual, expected);
2757}
2758
2759/* Implementation detail of the ASSERT_CONDITION_* macros. */
2760
757bf1df
DM
2761void
2762assert_condition (const location &loc,
2763 region_model &model,
2764 tree lhs, tree_code op, tree rhs,
2765 tristate expected)
2766{
2767 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
2768 ASSERT_EQ_AT (loc, actual, expected);
2769}
2770
90f7c300
DM
2771/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
2772
2773static void
2774assert_dump_tree_eq (const location &loc, tree t, const char *expected)
2775{
2776 auto_fix_quotes sentinel;
2777 pretty_printer pp;
2778 pp_format_decoder (&pp) = default_tree_printer;
2779 dump_tree (&pp, t);
2780 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
2781}
2782
2783/* Assert that dump_tree (T) is EXPECTED. */
2784
2785#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
2786 SELFTEST_BEGIN_STMT \
2787 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
2788 SELFTEST_END_STMT
2789
757bf1df
DM
2790/* Implementation detail of ASSERT_DUMP_EQ. */
2791
2792static void
2793assert_dump_eq (const location &loc,
2794 const region_model &model,
2795 bool summarize,
2796 const char *expected)
2797{
2798 auto_fix_quotes sentinel;
2799 pretty_printer pp;
2800 pp_format_decoder (&pp) = default_tree_printer;
808f4dfe
DM
2801
2802 model.dump_to_pp (&pp, summarize, true);
757bf1df
DM
2803 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
2804}
2805
2806/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
2807
2808#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
2809 SELFTEST_BEGIN_STMT \
2810 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
2811 SELFTEST_END_STMT
2812
2813/* Smoketest for region_model::dump_to_pp. */
2814
2815static void
2816test_dump ()
2817{
808f4dfe
DM
2818 region_model_manager mgr;
2819 region_model model (&mgr);
757bf1df
DM
2820
2821 ASSERT_DUMP_EQ (model, false,
808f4dfe
DM
2822 "stack depth: 0\n"
2823 "m_called_unknown_fn: FALSE\n"
2824 "constraint_manager:\n"
2825 " equiv classes:\n"
2826 " constraints:\n");
2827 ASSERT_DUMP_EQ (model, true,
2828 "stack depth: 0\n"
2829 "m_called_unknown_fn: FALSE\n"
2830 "constraint_manager:\n"
757bf1df
DM
2831 " equiv classes:\n"
2832 " constraints:\n");
757bf1df
DM
2833}
2834
884d9141
DM
2835/* Helper function for selftests. Create a struct or union type named NAME,
2836 with the fields given by the FIELD_DECLS in FIELDS.
2837 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
2838 create a UNION_TYPE. */
2839
2840static tree
2841make_test_compound_type (const char *name, bool is_struct,
2842 const auto_vec<tree> *fields)
2843{
2844 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
2845 TYPE_NAME (t) = get_identifier (name);
2846 TYPE_SIZE (t) = 0;
2847
2848 tree fieldlist = NULL;
2849 int i;
2850 tree field;
2851 FOR_EACH_VEC_ELT (*fields, i, field)
2852 {
2853 gcc_assert (TREE_CODE (field) == FIELD_DECL);
2854 DECL_CONTEXT (field) = t;
2855 fieldlist = chainon (field, fieldlist);
2856 }
2857 fieldlist = nreverse (fieldlist);
2858 TYPE_FIELDS (t) = fieldlist;
2859
2860 layout_type (t);
2861 return t;
2862}
2863
a96f1c38
DM
2864/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
2865
2866struct coord_test
2867{
2868 coord_test ()
2869 {
2870 auto_vec<tree> fields;
2871 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2872 get_identifier ("x"), integer_type_node);
2873 fields.safe_push (m_x_field);
2874 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2875 get_identifier ("y"), integer_type_node);
2876 fields.safe_push (m_y_field);
2877 m_coord_type = make_test_compound_type ("coord", true, &fields);
2878 }
2879
2880 tree m_x_field;
2881 tree m_y_field;
2882 tree m_coord_type;
2883};
2884
/* Verify usage of a struct.  */

static void
test_struct ()
{
  coord_test ct;

  /* Global "struct coord c;" plus COMPONENT_REFs for "c.x" and "c.y".  */
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset ();
    /* "x" is the initial field, so it starts at bit offset 0 within "c".  */
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y".  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset ();
    /* "y" follows one int ("x"), hence an offset of INT_TYPE_SIZE bits.  */
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}
2922
/* Verify usage of an array element.  */

static void
test_array_1 ()
{
  /* Global "char a[10];".  */
  tree tlen = size_int (10);
  tree arr_type = build_array_type (char_type_node, build_index_type (tlen));

  tree a = build_global_decl ("a", arr_type);

  region_model_manager mgr;
  region_model model (&mgr);
  /* Write 'A' to a[0]; merely exercises the lvalue/set_value paths
     (no assertions beyond not crashing).  */
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree a_0 = build4 (ARRAY_REF, char_type_node,
		     a, int_0, NULL_TREE, NULL_TREE);
  tree char_A = build_int_cst (char_type_node, 'A');
  model.set_value (a_0, char_A, NULL);
}
2941
/* Verify that region_model::get_representative_tree works as expected.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    /* The representative should be the STRING_CST itself.  */
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal.  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    /* A placeholder stands in for "some value", so we can ask which
       tree best describes where that value was stored.  */
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
			 a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
			 a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
3034
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  /* Same tree constant -> same svalue; different constants -> different
     svalues; and no diagnostics should have been emitted.  */
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
	     model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
3066
/* Verify that each type gets its own singleton unknown_svalue within a
   region_model_manager, and that NULL_TREE gets its own singleton.  */

static void
test_unique_unknowns ()
{
  region_model_manager mgr;
  const svalue *unknown_int
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  /* Repeated calls with the same type should get the same "unknown"
     svalue.  */
  const svalue *unknown_int_2
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  ASSERT_EQ (unknown_int, unknown_int_2);

  /* Different types (or the NULL type) should have different
     unknown_svalues.  */
  const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_NE (unknown_NULL_type, unknown_int);

  /* Repeated calls with NULL for the type should get the same "unknown"
     svalue.  */
  const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
  ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
}
3092
/* Verify that initial_svalue are handled as expected.  */

static void
test_initial_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  /* Distinct globals have distinct initial values, and the rvalue of an
     untouched region is exactly its initial_svalue.  */
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  ASSERT_NE (x_init, y_init);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

}
757bf1df 3111
/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x".  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
				      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
	       (boolean_type_node, TRUTH_NOT_EXPR,
		mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
					 x_init, y_init)),
	     mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
				      x_init, y_init));
}
3149
/* Verify that binops on constant svalues are folded.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  /* Build constant svalues for the integers 0..9.  */
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  /* Constant-folding of PLUS/MINUS/MULT/EQ/NE over all pairs whose
     result is still within the table.  */
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
	if (i != j)
	  ASSERT_NE (cst_sval[i], cst_sval[j]);
	if (i + j < NUM_CSTS)
	  {
	    const svalue *sum
	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (sum, cst_sval[i + j]);
	  }
	if (i - j >= 0)
	  {
	    const svalue *difference
	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (difference, cst_sval[i - j]);
	  }
	if (i * j < NUM_CSTS)
	  {
	    const svalue *product
	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (product, cst_sval[i * j]);
	  }
	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
						    cst_sval[i], cst_sval[j]);
	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
						     cst_sval[i], cst_sval[j]);
	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
	// etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding.  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS.  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
}
3265
/* Verify that sub_svalues are folded as expected.  */

static void
test_sub_svalue_folding ()
{
  coord_test ct;
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);

  region_model_manager mgr;
  region_model model (&mgr);
  test_region_model_context ctxt;
  const region *c_x_reg = model.get_lvalue (c_x, &ctxt);

  /* Verify that sub_svalue of "unknown" simply
     yields an unknown.  */

  const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
  const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
						    unknown, c_x_reg);
  /* The result collapses to SK_UNKNOWN, but keeps the field's type.  */
  ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
  ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
}
3290
/* Test that region::descendent_of_p works as expected.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  /* A global's region is a descendent of the "globals" region.  */
  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}
3316
/* Verify that simple assignments work as expected.  */

static void
test_assignment ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* "x == 0", then use of y, then "y = 0;".  */
  region_model_manager mgr;
  region_model model (&mgr);
  ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
  ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
  model.set_value (model.get_lvalue (y, NULL),
		   model.get_rvalue (int_0, NULL),
		   NULL);
  /* After the assignment, "y == 0" is known, and via the constraint
     "x == 0" it follows that "y == x".  */
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
}
3337
/* Verify that compound assignments work as expected.  */

static void
test_compound_assignment ()
{
  coord_test ct;

  /* Two "struct coord" globals, "c" and "d", with COMPONENT_REFs for
     each of their fields.  */
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);
  tree d = build_global_decl ("d", ct.m_coord_type);
  tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     d, ct.m_x_field, NULL_TREE);
  tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     d, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Copy c to d.  */
  model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL),
		     NULL);
  /* Check that the fields have the same svalues.  */
  ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
  ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
}
3371
/* Verify the details of pushing and popping stack frames.  */

static void
test_stack_frames ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = build_int_cst (integer_type_node, 0);

  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
				    "parent_fn",
				    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
				   "child_fn",
				   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame.  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("b"),
		       integer_type_node);
  /* "x" and "y" in a child frame.  */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("x"),
		       integer_type_node);
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("y"),
		       integer_type_node);

  /* "p" global.  */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global.  */
  tree q = build_global_decl ("q", ptr_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame for "parent_fn".  */
  const region *parent_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
			NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
  const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_reg,
		   model.get_rvalue (int_42, &ctxt),
		   &ctxt);
  ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);

  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn".  */
  const region *child_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
		   model.get_rvalue (int_0, &ctxt),
		   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame:  p = &x.  */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
		   &ctxt);
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);

  /* Point another global pointer at p: q = &p.  */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
		   &ctxt);

  /* Test region::descendent_of_p.  */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned.  */
  const svalue *new_p_sval = model.get_rvalue (p, &ctxt);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
	     POISON_KIND_POPPED_STACK);

  /* Verify that q still points to p, in spite of the region
     renumbering.  */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->dyn_cast_region_svalue ()->get_pointee (),
	     model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated.  */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame.  */
  /* Verify "a" still has its value.  */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
	     int_42);
  /* Verify "b" still has its constraint.  */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));
}
3499
/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack.  */

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      /* Record the initial svalue of "n" in this frame for later lookup.  */
      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
    }
}
3560
/* Ensure that region_model::operator== works as expected.  */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

/* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model1 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}
3594
/* Verify that region models for
     x = 42; y = 113;
   and
     y = 113; x = 42;
   are equal.  */

static void
test_canonicalization_2 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* Assign in the order x, y...  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.set_value (model0.get_lvalue (x, NULL),
		    model0.get_rvalue (int_42, NULL),
		    NULL);
  model0.set_value (model0.get_lvalue (y, NULL),
		    model0.get_rvalue (int_113, NULL),
		    NULL);

  /* ...and in the order y, x; the resulting models should be equal
     regardless of assignment order.  */
  region_model model1 (&mgr);
  model1.set_value (model1.get_lvalue (y, NULL),
		    model1.get_rvalue (int_113, NULL),
		    NULL);
  model1.set_value (model1.get_lvalue (x, NULL),
		    model1.get_rvalue (int_42, NULL),
		    NULL);

  ASSERT_EQ (model0, model1);
}
3628
/* Verify that constraints for
     x > 3 && y > 42
   and
     y > 42 && x > 3
   are equal after canonicalization.  */

static void
test_canonicalization_3 ()
{
  tree int_3 = build_int_cst (integer_type_node, 3);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* Add the constraints in one order...  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  model0.add_constraint (x, GT_EXPR, int_3, NULL);
  model0.add_constraint (y, GT_EXPR, int_42, NULL);

  /* ...and in the opposite order.  */
  region_model model1 (&mgr);
  model1.add_constraint (y, GT_EXPR, int_42, NULL);
  model1.add_constraint (x, GT_EXPR, int_3, NULL);

  /* Canonicalization should make the two models compare equal.  */
  model0.canonicalize ();
  model1.canonicalize ();
  ASSERT_EQ (model0, model1);
}
3656
/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  /* Pull each "interesting" constant (NaN, infinities, etc.) into the
     model; the test is that canonicalization doesn't crash on them.  */
  unsigned i;
  tree cst;
  FOR_EACH_VEC_ELT (csts, i, cst)
    model.get_rvalue (cst, NULL);

  model.canonicalize ();
}
3676
/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.  */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  program_point point (program_point::origin ());
  test_region_model_context ctxt;
  /* Both input models must share OUT_MERGED_MODEL's manager so that
     their svalues/regions are comparable.  */
  region_model_manager *mgr = out_merged_model->get_manager ();
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}
3707
3708/* Verify that we can merge region_model instances. */
3709
3710static void
3711test_state_merging ()
3712{
3713 tree int_42 = build_int_cst (integer_type_node, 42);
3714 tree int_113 = build_int_cst (integer_type_node, 113);
3715 tree x = build_global_decl ("x", integer_type_node);
3716 tree y = build_global_decl ("y", integer_type_node);
3717 tree z = build_global_decl ("z", integer_type_node);
3718 tree p = build_global_decl ("p", ptr_type_node);
3719
3720 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
3721 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
3722
3723 auto_vec <tree> param_types;
3724 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
3725 allocate_struct_function (test_fndecl, true);
3726
3727 /* Param "a". */
3728 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3729 get_identifier ("a"),
3730 integer_type_node);
3731 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
3732
455f58ec
DM
3733 /* Param "q", a pointer. */
3734 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3735 get_identifier ("q"),
3736 ptr_type_node);
3737
808f4dfe
DM
3738 program_point point (program_point::origin ());
3739 region_model_manager mgr;
3740
757bf1df 3741 {
808f4dfe
DM
3742 region_model model0 (&mgr);
3743 region_model model1 (&mgr);
3744 region_model merged (&mgr);
757bf1df 3745 /* Verify empty models can be merged. */
808f4dfe 3746 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3747 ASSERT_EQ (model0, merged);
3748 }
3749
3750 /* Verify that we can merge two contradictory constraints on the
3751 value for a global. */
3752 /* TODO: verify that the merged model doesn't have a value for
3753 the global */
3754 {
808f4dfe
DM
3755 region_model model0 (&mgr);
3756 region_model model1 (&mgr);
3757 region_model merged (&mgr);
757bf1df
DM
3758 test_region_model_context ctxt;
3759 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
3760 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe 3761 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3762 ASSERT_NE (model0, merged);
3763 ASSERT_NE (model1, merged);
3764 }
3765
3766 /* Verify handling of a PARM_DECL. */
3767 {
3768 test_region_model_context ctxt;
808f4dfe
DM
3769 region_model model0 (&mgr);
3770 region_model model1 (&mgr);
757bf1df
DM
3771 ASSERT_EQ (model0.get_stack_depth (), 0);
3772 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
3773 ASSERT_EQ (model0.get_stack_depth (), 1);
757bf1df
DM
3774 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
3775
808f4dfe
DM
3776 placeholder_svalue test_sval (integer_type_node, "test sval");
3777 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
3778 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
757bf1df
DM
3779 ASSERT_EQ (model0, model1);
3780
757bf1df 3781 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3782 region_model merged (&mgr);
3783 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3784 ASSERT_EQ (model0, merged);
808f4dfe
DM
3785 /* In particular, "a" should have the placeholder value. */
3786 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
757bf1df
DM
3787 }
3788
3789 /* Verify handling of a global. */
3790 {
3791 test_region_model_context ctxt;
808f4dfe
DM
3792 region_model model0 (&mgr);
3793 region_model model1 (&mgr);
757bf1df 3794
808f4dfe
DM
3795 placeholder_svalue test_sval (integer_type_node, "test sval");
3796 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
3797 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
3798 ASSERT_EQ (model0, model1);
757bf1df
DM
3799
3800 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3801 region_model merged (&mgr);
3802 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3803 ASSERT_EQ (model0, merged);
808f4dfe
DM
3804 /* In particular, "x" should have the placeholder value. */
3805 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
757bf1df
DM
3806 }
3807
3808 /* Use global-handling to verify various combinations of values. */
3809
3810 /* Two equal constant values. */
3811 {
808f4dfe
DM
3812 region_model merged (&mgr);
3813 const svalue *merged_x_sval;
757bf1df
DM
3814 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
3815
3816 /* In particular, there should be a constant value for "x". */
3817 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
3818 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
3819 int_42);
3820 }
3821
3822 /* Two non-equal constant values. */
3823 {
808f4dfe
DM
3824 region_model merged (&mgr);
3825 const svalue *merged_x_sval;
757bf1df
DM
3826 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
3827
808f4dfe
DM
3828 /* In particular, there should be a "widening" value for "x". */
3829 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
757bf1df
DM
3830 }
3831
808f4dfe 3832 /* Initial and constant. */
757bf1df 3833 {
808f4dfe
DM
3834 region_model merged (&mgr);
3835 const svalue *merged_x_sval;
757bf1df
DM
3836 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
3837
3838 /* In particular, there should be an unknown value for "x". */
3839 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
3840 }
3841
808f4dfe 3842 /* Constant and initial. */
757bf1df 3843 {
808f4dfe
DM
3844 region_model merged (&mgr);
3845 const svalue *merged_x_sval;
757bf1df
DM
3846 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
3847
3848 /* In particular, there should be an unknown value for "x". */
3849 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
3850 }
3851
3852 /* Unknown and constant. */
3853 // TODO
3854
3855 /* Pointers: NULL and NULL. */
3856 // TODO
3857
3858 /* Pointers: NULL and non-NULL. */
3859 // TODO
3860
3861 /* Pointers: non-NULL and non-NULL: ptr to a local. */
3862 {
808f4dfe 3863 region_model model0 (&mgr);
757bf1df 3864 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
757bf1df
DM
3865 model0.set_value (model0.get_lvalue (p, NULL),
3866 model0.get_rvalue (addr_of_a, NULL), NULL);
3867
3868 region_model model1 (model0);
3869 ASSERT_EQ (model0, model1);
3870
3871 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3872 region_model merged (&mgr);
3873 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3874 ASSERT_EQ (model0, merged);
3875 }
3876
3877 /* Pointers: non-NULL and non-NULL: ptr to a global. */
3878 {
808f4dfe 3879 region_model merged (&mgr);
757bf1df 3880 /* p == &y in both input models. */
808f4dfe 3881 const svalue *merged_p_sval;
757bf1df
DM
3882 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
3883 &merged_p_sval);
3884
3885 /* We should get p == &y in the merged model. */
3886 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
808f4dfe
DM
3887 const region_svalue *merged_p_ptr
3888 = merged_p_sval->dyn_cast_region_svalue ();
3889 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
3890 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
757bf1df
DM
3891 }
3892
3893 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
3894 {
808f4dfe
DM
3895 region_model merged (&mgr);
3896 /* x == &y vs x == &z in the input models; these are actually casts
3897 of the ptrs to "int". */
3898 const svalue *merged_x_sval;
3899 // TODO:
757bf1df
DM
3900 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
3901 &merged_x_sval);
3902
3903 /* We should get x == unknown in the merged model. */
3904 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
3905 }
3906
3907 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
3908 {
3909 test_region_model_context ctxt;
808f4dfe
DM
3910 region_model model0 (&mgr);
3911 tree size = build_int_cst (integer_type_node, 1024);
3912 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
3913 const region *new_reg = model0.create_region_for_heap_alloc (size_sval);
3914 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
757bf1df 3915 model0.set_value (model0.get_lvalue (p, &ctxt),
808f4dfe 3916 ptr_sval, &ctxt);
757bf1df
DM
3917
3918 region_model model1 (model0);
3919
3920 ASSERT_EQ (model0, model1);
3921
808f4dfe
DM
3922 region_model merged (&mgr);
3923 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3924
808f4dfe 3925 /* The merged model ought to be identical. */
757bf1df
DM
3926 ASSERT_EQ (model0, merged);
3927 }
3928
808f4dfe
DM
3929 /* Two regions sharing the same placeholder svalue should continue sharing
3930 it after self-merger. */
757bf1df
DM
3931 {
3932 test_region_model_context ctxt;
808f4dfe
DM
3933 region_model model0 (&mgr);
3934 placeholder_svalue placeholder_sval (integer_type_node, "test");
3935 model0.set_value (model0.get_lvalue (x, &ctxt),
3936 &placeholder_sval, &ctxt);
3937 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
757bf1df
DM
3938 region_model model1 (model0);
3939
3940 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3941 region_model merged (&mgr);
3942 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3943 ASSERT_EQ (model0, merged);
3944
3945 /* In particular, we should have x == y. */
3946 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
3947 tristate (tristate::TS_TRUE));
3948 }
3949
757bf1df 3950 {
808f4dfe
DM
3951 region_model model0 (&mgr);
3952 region_model model1 (&mgr);
757bf1df
DM
3953 test_region_model_context ctxt;
3954 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
3955 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
808f4dfe
DM
3956 region_model merged (&mgr);
3957 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3958 }
3959
3960 {
808f4dfe
DM
3961 region_model model0 (&mgr);
3962 region_model model1 (&mgr);
757bf1df
DM
3963 test_region_model_context ctxt;
3964 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
3965 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
3966 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe
DM
3967 region_model merged (&mgr);
3968 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3969 }
757bf1df
DM
3970
3971 // TODO: what can't we merge? need at least one such test
3972
3973 /* TODO: various things
3974 - heap regions
3975 - value merging:
3976 - every combination, but in particular
808f4dfe 3977 - pairs of regions
757bf1df
DM
3978 */
3979
3980 /* Views. */
3981 {
3982 test_region_model_context ctxt;
808f4dfe 3983 region_model model0 (&mgr);
757bf1df 3984
808f4dfe
DM
3985 const region *x_reg = model0.get_lvalue (x, &ctxt);
3986 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
757bf1df
DM
3987 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
3988
3989 region_model model1 (model0);
3990 ASSERT_EQ (model1, model0);
3991
3992 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3993 region_model merged (&mgr);
3994 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3995 }
455f58ec
DM
3996
3997 /* Verify that we can merge a model in which a local in an older stack
3998 frame points to a local in a more recent stack frame. */
3999 {
808f4dfe 4000 region_model model0 (&mgr);
455f58ec 4001 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
808f4dfe 4002 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
455f58ec
DM
4003
4004 /* Push a second frame. */
808f4dfe 4005 const region *reg_2nd_frame
455f58ec
DM
4006 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
4007
4008 /* Have a pointer in the older frame point to a local in the
4009 more recent frame. */
808f4dfe
DM
4010 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
4011 model0.set_value (q_in_first_frame, sval_ptr, NULL);
455f58ec
DM
4012
4013 /* Verify that it's pointing at the newer frame. */
808f4dfe
DM
4014 const region *reg_pointee
4015 = sval_ptr->dyn_cast_region_svalue ()->get_pointee ();
4016 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
455f58ec 4017
808f4dfe 4018 model0.canonicalize ();
455f58ec
DM
4019
4020 region_model model1 (model0);
4021 ASSERT_EQ (model0, model1);
4022
4023 /* They should be mergeable, and the result should be the same
4024 (after canonicalization, at least). */
808f4dfe
DM
4025 region_model merged (&mgr);
4026 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4027 merged.canonicalize ();
455f58ec
DM
4028 ASSERT_EQ (model0, merged);
4029 }
4030
4031 /* Verify that we can merge a model in which a local points to a global. */
4032 {
808f4dfe 4033 region_model model0 (&mgr);
455f58ec
DM
4034 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
4035 model0.set_value (model0.get_lvalue (q, NULL),
4036 model0.get_rvalue (addr_of_y, NULL), NULL);
4037
455f58ec
DM
4038 region_model model1 (model0);
4039 ASSERT_EQ (model0, model1);
4040
4041 /* They should be mergeable, and the result should be the same
4042 (after canonicalization, at least). */
808f4dfe
DM
4043 region_model merged (&mgr);
4044 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
455f58ec
DM
4045 ASSERT_EQ (model0, merged);
4046 }
757bf1df
DM
4047}
4048
4049/* Verify that constraints are correctly merged when merging region_model
4050 instances. */
4051
4052static void
4053test_constraint_merging ()
4054{
4055 tree int_0 = build_int_cst (integer_type_node, 0);
4056 tree int_5 = build_int_cst (integer_type_node, 5);
4057 tree x = build_global_decl ("x", integer_type_node);
4058 tree y = build_global_decl ("y", integer_type_node);
4059 tree z = build_global_decl ("z", integer_type_node);
4060 tree n = build_global_decl ("n", integer_type_node);
4061
808f4dfe 4062 region_model_manager mgr;
757bf1df
DM
4063 test_region_model_context ctxt;
4064
4065 /* model0: 0 <= (x == y) < n. */
808f4dfe 4066 region_model model0 (&mgr);
757bf1df
DM
4067 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
4068 model0.add_constraint (x, GE_EXPR, int_0, NULL);
4069 model0.add_constraint (x, LT_EXPR, n, NULL);
4070
4071 /* model1: z != 5 && (0 <= x < n). */
808f4dfe 4072 region_model model1 (&mgr);
757bf1df
DM
4073 model1.add_constraint (z, NE_EXPR, int_5, NULL);
4074 model1.add_constraint (x, GE_EXPR, int_0, NULL);
4075 model1.add_constraint (x, LT_EXPR, n, NULL);
4076
4077 /* They should be mergeable; the merged constraints should
4078 be: (0 <= x < n). */
808f4dfe
DM
4079 program_point point (program_point::origin ());
4080 region_model merged (&mgr);
4081 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
4082
4083 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
4084 tristate (tristate::TS_TRUE));
4085 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
4086 tristate (tristate::TS_TRUE));
4087
4088 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
4089 tristate (tristate::TS_UNKNOWN));
4090 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
4091 tristate (tristate::TS_UNKNOWN));
4092}
4093
808f4dfe
DM
4094/* Verify that widening_svalue::eval_condition_without_cm works as
4095 expected. */
4096
4097static void
4098test_widening_constraints ()
4099{
4100 program_point point (program_point::origin ());
4101 tree int_0 = build_int_cst (integer_type_node, 0);
4102 tree int_m1 = build_int_cst (integer_type_node, -1);
4103 tree int_1 = build_int_cst (integer_type_node, 1);
4104 tree int_256 = build_int_cst (integer_type_node, 256);
4105 region_model_manager mgr;
4106 test_region_model_context ctxt;
4107 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
4108 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
4109 const svalue *w_zero_then_one_sval
4110 = mgr.get_or_create_widening_svalue (integer_type_node, point,
4111 int_0_sval, int_1_sval);
4112 const widening_svalue *w_zero_then_one
4113 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
4114 ASSERT_EQ (w_zero_then_one->get_direction (),
4115 widening_svalue::DIR_ASCENDING);
4116 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
4117 tristate::TS_FALSE);
4118 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
4119 tristate::TS_FALSE);
4120 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
4121 tristate::TS_UNKNOWN);
4122 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
4123 tristate::TS_UNKNOWN);
4124
4125 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
4126 tristate::TS_FALSE);
4127 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
4128 tristate::TS_UNKNOWN);
4129 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
4130 tristate::TS_UNKNOWN);
4131 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
4132 tristate::TS_UNKNOWN);
4133
4134 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
4135 tristate::TS_TRUE);
4136 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
4137 tristate::TS_UNKNOWN);
4138 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
4139 tristate::TS_UNKNOWN);
4140 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
4141 tristate::TS_UNKNOWN);
4142
4143 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
4144 tristate::TS_TRUE);
4145 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
4146 tristate::TS_TRUE);
4147 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
4148 tristate::TS_UNKNOWN);
4149 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
4150 tristate::TS_UNKNOWN);
4151
4152 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
4153 tristate::TS_FALSE);
4154 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
4155 tristate::TS_UNKNOWN);
4156 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
4157 tristate::TS_UNKNOWN);
4158 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
4159 tristate::TS_UNKNOWN);
4160
4161 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
4162 tristate::TS_TRUE);
4163 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
4164 tristate::TS_UNKNOWN);
4165 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
4166 tristate::TS_UNKNOWN);
4167 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
4168 tristate::TS_UNKNOWN);
4169}
4170
4171/* Verify merging constraints for states simulating successive iterations
4172 of a loop.
4173 Simulate:
4174 for (i = 0; i < 256; i++)
4175 [...body...]
4176 i.e. this gimple:.
4177 i_15 = 0;
4178 goto <bb 4>;
4179
4180 <bb 4> :
4181 i_11 = PHI <i_15(2), i_23(3)>
4182 if (i_11 <= 255)
4183 goto <bb 3>;
4184 else
4185 goto [AFTER LOOP]
4186
4187 <bb 3> :
4188 [LOOP BODY]
4189 i_23 = i_11 + 1;
4190
4191 and thus these ops (and resultant states):
4192 i_11 = PHI()
4193 {i_11: 0}
4194 add_constraint (i_11 <= 255) [for the true edge]
4195 {i_11: 0} [constraint was a no-op]
4196 i_23 = i_11 + 1;
4197 {i_22: 1}
4198 i_11 = PHI()
4199 {i_11: WIDENED (at phi, 0, 1)}
4200 add_constraint (i_11 <= 255) [for the true edge]
4201 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
4202 i_23 = i_11 + 1;
4203 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
4204 i_11 = PHI(); merge with state at phi above
4205 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
4206 [changing meaning of "WIDENED" here]
4207 if (i_11 <= 255)
4208 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
4209 F: {i_11: 256}
4210 */
4211
4212static void
4213test_iteration_1 ()
4214{
4215 program_point point (program_point::origin ());
4216
4217 tree int_0 = build_int_cst (integer_type_node, 0);
4218 tree int_1 = build_int_cst (integer_type_node, 1);
4219 tree int_256 = build_int_cst (integer_type_node, 256);
4220 tree int_257 = build_int_cst (integer_type_node, 257);
4221 tree i = build_global_decl ("i", integer_type_node);
4222
4223 region_model_manager mgr;
4224 test_region_model_context ctxt;
4225
4226 /* model0: i: 0. */
4227 region_model model0 (&mgr);
4228 model0.set_value (i, int_0, &ctxt);
4229
4230 /* model1: i: 1. */
4231 region_model model1 (&mgr);
4232 model1.set_value (i, int_1, &ctxt);
4233
4234 /* Should merge "i" to a widened value. */
4235 region_model model2 (&mgr);
4236 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
4237 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
4238 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
4239 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
4240 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
4241
4242 /* Add constraint: i < 256 */
4243 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
4244 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
4245 tristate (tristate::TS_TRUE));
4246 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
4247 tristate (tristate::TS_TRUE));
4248
4249 /* Try merging with the initial state. */
4250 region_model model3 (&mgr);
4251 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
4252 /* Merging the merged value with the initial value should be idempotent,
4253 so that the analysis converges. */
4254 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
4255 /* Merger of 0 and a widening value with constraint < CST
4256 should retain the constraint, even though it was implicit
4257 for the 0 case. */
4258 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
4259 tristate (tristate::TS_TRUE));
4260 /* ...and we should have equality: the analysis should have converged. */
4261 ASSERT_EQ (model3, model2);
4262
4263 /* "i_23 = i_11 + 1;" */
4264 region_model model4 (model3);
4265 ASSERT_EQ (model4, model2);
4266 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
4267 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
4268 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
4269
4270 /* Try merging with the "i: 1" state. */
4271 region_model model5 (&mgr);
4272 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
4273 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
4274 ASSERT_EQ (model5, model4);
4275
4276 /* "i_11 = PHI();" merge with state at phi above.
4277 For i, we should have a merger of WIDENING with WIDENING + 1,
4278 and this should be WIDENING again. */
4279 region_model model6 (&mgr);
4280 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
4281 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
4282 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
4283
4284 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
4285}
4286
6969ac30
DM
4287/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
4288 all cast pointers to that region are also known to be non-NULL. */
4289
4290static void
4291test_malloc_constraints ()
4292{
808f4dfe
DM
4293 region_model_manager mgr;
4294 region_model model (&mgr);
6969ac30
DM
4295 tree p = build_global_decl ("p", ptr_type_node);
4296 tree char_star = build_pointer_type (char_type_node);
4297 tree q = build_global_decl ("q", char_star);
4298 tree null_ptr = build_int_cst (ptr_type_node, 0);
4299
808f4dfe
DM
4300 const svalue *size_in_bytes
4301 = mgr.get_or_create_unknown_svalue (integer_type_node);
4302 const region *reg = model.create_region_for_heap_alloc (size_in_bytes);
4303 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
4304 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6969ac30
DM
4305 model.set_value (q, p, NULL);
4306
6969ac30
DM
4307 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
4308 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
4309 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
4310 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
4311
4312 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
4313
6969ac30
DM
4314 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
4315 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
4316 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
4317 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
4318}
4319
808f4dfe
DM
4320/* Smoketest of getting and setting the value of a variable. */
4321
4322static void
4323test_var ()
4324{
4325 /* "int i;" */
4326 tree i = build_global_decl ("i", integer_type_node);
4327
4328 tree int_17 = build_int_cst (integer_type_node, 17);
4329 tree int_m3 = build_int_cst (integer_type_node, -3);
4330
4331 region_model_manager mgr;
4332 region_model model (&mgr);
4333
4334 const region *i_reg = model.get_lvalue (i, NULL);
4335 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
4336
4337 /* Reading "i" should give a symbolic "initial value". */
4338 const svalue *sval_init = model.get_rvalue (i, NULL);
4339 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
4340 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
4341 /* ..and doing it again should give the same "initial value". */
4342 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
4343
4344 /* "i = 17;". */
4345 model.set_value (i, int_17, NULL);
4346 ASSERT_EQ (model.get_rvalue (i, NULL),
4347 model.get_rvalue (int_17, NULL));
4348
4349 /* "i = -3;". */
4350 model.set_value (i, int_m3, NULL);
4351 ASSERT_EQ (model.get_rvalue (i, NULL),
4352 model.get_rvalue (int_m3, NULL));
4353
4354 /* Verify get_offset for "i". */
4355 {
4356 region_offset offset = i_reg->get_offset ();
4357 ASSERT_EQ (offset.get_base_region (), i_reg);
4358 ASSERT_EQ (offset.get_bit_offset (), 0);
4359 }
4360}
4361
4362static void
4363test_array_2 ()
4364{
4365 /* "int arr[10];" */
4366 tree tlen = size_int (10);
4367 tree arr_type
4368 = build_array_type (integer_type_node, build_index_type (tlen));
4369 tree arr = build_global_decl ("arr", arr_type);
4370
4371 /* "int i;" */
4372 tree i = build_global_decl ("i", integer_type_node);
4373
4374 tree int_0 = build_int_cst (integer_type_node, 0);
4375 tree int_1 = build_int_cst (integer_type_node, 1);
4376
4377 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
4378 arr, int_0, NULL_TREE, NULL_TREE);
4379 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
4380 arr, int_1, NULL_TREE, NULL_TREE);
4381 tree arr_i = build4 (ARRAY_REF, integer_type_node,
4382 arr, i, NULL_TREE, NULL_TREE);
4383
4384 tree int_17 = build_int_cst (integer_type_node, 17);
4385 tree int_42 = build_int_cst (integer_type_node, 42);
4386 tree int_m3 = build_int_cst (integer_type_node, -3);
4387
4388 region_model_manager mgr;
4389 region_model model (&mgr);
4390 /* "arr[0] = 17;". */
4391 model.set_value (arr_0, int_17, NULL);
4392 /* "arr[1] = -3;". */
4393 model.set_value (arr_1, int_m3, NULL);
4394
4395 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
4396 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
4397
4398 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
4399 model.set_value (arr_1, int_42, NULL);
4400 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
4401
4402 /* Verify get_offset for "arr[0]". */
4403 {
4404 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
4405 region_offset offset = arr_0_reg->get_offset ();
4406 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
4407 ASSERT_EQ (offset.get_bit_offset (), 0);
4408 }
4409
4410 /* Verify get_offset for "arr[1]". */
4411 {
4412 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
4413 region_offset offset = arr_1_reg->get_offset ();
4414 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
4415 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
4416 }
4417
4418 /* "arr[i] = i;" - this should remove the earlier bindings. */
4419 model.set_value (arr_i, i, NULL);
4420 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
4421 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
4422
4423 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
4424 model.set_value (arr_0, int_17, NULL);
4425 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
4426 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
4427}
4428
4429/* Smoketest of dereferencing a pointer via MEM_REF. */
4430
4431static void
4432test_mem_ref ()
4433{
4434 /*
4435 x = 17;
4436 p = &x;
4437 *p;
4438 */
4439 tree x = build_global_decl ("x", integer_type_node);
4440 tree int_star = build_pointer_type (integer_type_node);
4441 tree p = build_global_decl ("p", int_star);
4442
4443 tree int_17 = build_int_cst (integer_type_node, 17);
4444 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
4445 tree offset_0 = build_int_cst (integer_type_node, 0);
4446 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
4447
4448 region_model_manager mgr;
4449 region_model model (&mgr);
4450
4451 /* "x = 17;". */
4452 model.set_value (x, int_17, NULL);
4453
4454 /* "p = &x;". */
4455 model.set_value (p, addr_of_x, NULL);
4456
4457 const svalue *sval = model.get_rvalue (star_p, NULL);
4458 ASSERT_EQ (sval->maybe_get_constant (), int_17);
4459}
4460
4461/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
4462 Analogous to this code:
4463 void test_6 (int a[10])
4464 {
4465 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
4466 a[3] = 42;
4467 __analyzer_eval (a[3] == 42); [should be TRUE]
4468 }
4469 from data-model-1.c, which looks like this at the gimple level:
4470 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
4471 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
4472 int _2 = *_1; # MEM_REF
4473 _Bool _3 = _2 == 42;
4474 int _4 = (int) _3;
4475 __analyzer_eval (_4);
4476
4477 # a[3] = 42;
4478 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
4479 *_5 = 42; # MEM_REF
4480
4481 # __analyzer_eval (a[3] == 42); [should be TRUE]
4482 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
4483 int _7 = *_6; # MEM_REF
4484 _Bool _8 = _7 == 42;
4485 int _9 = (int) _8;
4486 __analyzer_eval (_9); */
4487
4488static void
4489test_POINTER_PLUS_EXPR_then_MEM_REF ()
4490{
4491 tree int_star = build_pointer_type (integer_type_node);
4492 tree a = build_global_decl ("a", int_star);
4493 tree offset_12 = build_int_cst (size_type_node, 12);
4494 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
4495 tree offset_0 = build_int_cst (integer_type_node, 0);
4496 tree mem_ref = build2 (MEM_REF, integer_type_node,
4497 pointer_plus_expr, offset_0);
4498 region_model_manager mgr;
4499 region_model m (&mgr);
4500
4501 tree int_42 = build_int_cst (integer_type_node, 42);
4502 m.set_value (mem_ref, int_42, NULL);
4503 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
4504}
4505
4506/* Verify that malloc works. */
4507
4508static void
4509test_malloc ()
4510{
4511 tree int_star = build_pointer_type (integer_type_node);
4512 tree p = build_global_decl ("p", int_star);
4513 tree n = build_global_decl ("n", integer_type_node);
4514 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
4515 n, build_int_cst (size_type_node, 4));
4516
4517 region_model_manager mgr;
4518 test_region_model_context ctxt;
4519 region_model model (&mgr);
4520
4521 /* "p = malloc (n * 4);". */
4522 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
4523 const region *reg = model.create_region_for_heap_alloc (size_sval);
4524 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
4525 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
4526 // TODO: verify dynamic extents
4527}
4528
4529/* Verify that alloca works. */
4530
4531static void
4532test_alloca ()
4533{
4534 auto_vec <tree> param_types;
4535 tree fndecl = make_fndecl (integer_type_node,
4536 "test_fn",
4537 param_types);
4538 allocate_struct_function (fndecl, true);
4539
4540
4541 tree int_star = build_pointer_type (integer_type_node);
4542 tree p = build_global_decl ("p", int_star);
4543 tree n = build_global_decl ("n", integer_type_node);
4544 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
4545 n, build_int_cst (size_type_node, 4));
4546
4547 region_model_manager mgr;
4548 test_region_model_context ctxt;
4549 region_model model (&mgr);
4550
4551 /* Push stack frame. */
4552 const region *frame_reg
4553 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
4554 NULL, &ctxt);
4555 /* "p = alloca (n * 4);". */
4556 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
4557 const region *reg = model.create_region_for_alloca (size_sval);
4558 ASSERT_EQ (reg->get_parent_region (), frame_reg);
4559 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
4560 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
4561 // TODO: verify dynamic extents
4562
4563 /* Verify that the pointers to the alloca region are replaced by
4564 poisoned values when the frame is popped. */
4565 model.pop_frame (NULL, NULL, &ctxt);
4566 ASSERT_EQ (model.get_rvalue (p, &ctxt)->get_kind (), SK_POISONED);
4567}
4568
757bf1df
DM
4569/* Run all of the selftests within this file. */
4570
4571void
4572analyzer_region_model_cc_tests ()
4573{
8c08c983 4574 test_tree_cmp_on_constants ();
757bf1df 4575 test_dump ();
808f4dfe
DM
4576 test_struct ();
4577 test_array_1 ();
90f7c300 4578 test_get_representative_tree ();
757bf1df 4579 test_unique_constants ();
808f4dfe
DM
4580 test_unique_unknowns ();
4581 test_initial_svalue_folding ();
4582 test_unaryop_svalue_folding ();
4583 test_binop_svalue_folding ();
4584 test_sub_svalue_folding ();
4585 test_descendent_of_p ();
757bf1df 4586 test_assignment ();
a96f1c38 4587 test_compound_assignment ();
757bf1df
DM
4588 test_stack_frames ();
4589 test_get_representative_path_var ();
808f4dfe 4590 test_equality_1 ();
757bf1df
DM
4591 test_canonicalization_2 ();
4592 test_canonicalization_3 ();
8c08c983 4593 test_canonicalization_4 ();
757bf1df
DM
4594 test_state_merging ();
4595 test_constraint_merging ();
808f4dfe
DM
4596 test_widening_constraints ();
4597 test_iteration_1 ();
6969ac30 4598 test_malloc_constraints ();
808f4dfe
DM
4599 test_var ();
4600 test_array_2 ();
4601 test_mem_ref ();
4602 test_POINTER_PLUS_EXPR_then_MEM_REF ();
4603 test_malloc ();
4604 test_alloca ();
757bf1df
DM
4605}
4606
4607} // namespace selftest
4608
4609#endif /* CHECKING_P */
4610
75038aa6
DM
4611} // namespace ana
4612
757bf1df 4613#endif /* #if ENABLE_ANALYZER */