]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
Daily bump.
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df
DM
1/* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
7892ff37 29#include "diagnostic-core.h"
757bf1df
DM
30#include "graphviz.h"
31#include "options.h"
32#include "cgraph.h"
33#include "tree-dfa.h"
34#include "stringpool.h"
35#include "convert.h"
36#include "target.h"
37#include "fold-const.h"
38#include "tree-pretty-print.h"
39#include "diagnostic-color.h"
40#include "diagnostic-metadata.h"
757bf1df 41#include "tristate.h"
ef7827b0 42#include "bitmap.h"
757bf1df
DM
43#include "selftest.h"
44#include "function.h"
45#include "analyzer/analyzer.h"
46#include "analyzer/analyzer-logging.h"
47#include "ordered-hash-map.h"
48#include "options.h"
49#include "cgraph.h"
50#include "cfg.h"
51#include "digraph.h"
52#include "analyzer/supergraph.h"
53#include "sbitmap.h"
808f4dfe
DM
54#include "analyzer/call-string.h"
55#include "analyzer/program-point.h"
56#include "analyzer/store.h"
757bf1df
DM
57#include "analyzer/region-model.h"
58#include "analyzer/constraint-manager.h"
59#include "diagnostic-event-id.h"
60#include "analyzer/sm.h"
61#include "diagnostic-event-id.h"
62#include "analyzer/sm.h"
63#include "analyzer/pending-diagnostic.h"
808f4dfe 64#include "analyzer/region-model-reachability.h"
757bf1df 65#include "analyzer/analyzer-selftests.h"
884d9141 66#include "stor-layout.h"
757bf1df
DM
67
68#if ENABLE_ANALYZER
69
75038aa6
DM
70namespace ana {
71
757bf1df
DM
/* Dump T to PP in language-independent form, for debugging/logging/dumping
   purposes.  */

void
dump_tree (pretty_printer *pp, tree t)
{
  /* TDF_SLIM keeps the output terse (no bodies/details).  */
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
}
80
808f4dfe
DM
/* Dump T to PP in language-independent form in quotes, for
   debugging/logging/dumping purposes.  */

void
dump_quoted_tree (pretty_printer *pp, tree t)
{
  /* Emit opening/closing quotes (colorized if PP has color enabled)
     around the tree dump.  */
  pp_begin_quote (pp, pp_show_color (pp));
  dump_tree (pp, t);
  pp_end_quote (pp, pp_show_color (pp));
}
91
808f4dfe
DM
/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
   calls within other pp_printf calls.

   default_tree_printer handles 'T' and some other codes by calling
     dump_generic_node (pp, t, 0, TDF_SLIM, 0);
   dump_generic_node calls pp_printf in various places, leading to
   garbled output.

   Ideally pp_printf could be made to be reentrant, but in the meantime
   this function provides a workaround.  */

void
print_quoted_type (pretty_printer *pp, tree t)
{
  /* Calls dump_generic_node directly rather than going through
     pp_printf's %qT, sidestepping the reentrancy problem above.  */
  pp_begin_quote (pp, pp_show_color (pp));
  dump_generic_node (pp, t, 0, TDF_SLIM, 0);
  pp_end_quote (pp, pp_show_color (pp));
}
110
757bf1df
DM
/* class region_model.  */

/* Ctor for region_model: construct an "empty" model, with no frames
   on the stack, an empty store, and an empty constraint set.  */

region_model::region_model (region_model_manager *mgr)
: m_mgr (mgr), m_store (), m_current_frame (NULL)
{
  /* The constraint manager is heap-allocated and owned by this model
     (released in the dtor, replaced wholesale in operator=).  */
  m_constraints = new constraint_manager (mgr);
}
120
/* region_model's copy ctor.  Deep-copies the constraint manager;
   the store is copied by value and the manager pointer is shared.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame)
{
}
129
/* region_model's dtor.  The model owns m_constraints (see the ctor);
   everything else is a value member or an unowned pointer.  */

region_model::~region_model ()
{
  delete m_constraints;
}
136
137/* region_model's assignment operator. */
138
139region_model &
140region_model::operator= (const region_model &other)
141{
808f4dfe
DM
142 /* m_mgr is const. */
143 gcc_assert (m_mgr == other.m_mgr);
757bf1df 144
808f4dfe 145 m_store = other.m_store;
757bf1df
DM
146
147 delete m_constraints;
808f4dfe 148 m_constraints = new constraint_manager (*other.m_constraints);
757bf1df 149
808f4dfe 150 m_current_frame = other.m_current_frame;
757bf1df
DM
151
152 return *this;
153}
154
/* Equality operator for region_model.

   Amongst other things this directly compares the stores and the constraint
   managers, so for this to be meaningful both this and OTHER should
   have been canonicalized.  */

bool
region_model::operator== (const region_model &other) const
{
  /* We can only compare instances that use the same manager.  */
  gcc_assert (m_mgr == other.m_mgr);

  if (m_store != other.m_store)
    return false;

  if (*m_constraints != *other.m_constraints)
    return false;

  if (m_current_frame != other.m_current_frame)
    return false;

  /* Sanity-check (checking builds only): equal models must hash
     equally, since hash () is derived from the store and constraints
     compared above.  */
  gcc_checking_assert (hash () == other.hash ());

  return true;
}
180
181/* Generate a hash value for this region_model. */
182
183hashval_t
808f4dfe
DM
184region_model::hash () const
185{
186 hashval_t result = m_store.hash ();
187 result ^= m_constraints->hash ();
188 return result;
757bf1df
DM
189}
190
808f4dfe
DM
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.
   If MULTILINE, each section gets its own lines; otherwise the
   whole dump is emitted on one line with "{...}" groupings.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack, walking from the innermost frame outwards.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, "  ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");
}
757bf1df 238
808f4dfe 239/* Dump a representation of this model to FILE. */
757bf1df 240
808f4dfe
DM
241void
242region_model::dump (FILE *fp, bool simple, bool multiline) const
243{
244 pretty_printer pp;
245 pp_format_decoder (&pp) = default_tree_printer;
246 pp_show_color (&pp) = pp_show_color (global_dc->printer);
247 pp.buffer->stream = fp;
248 dump_to_pp (&pp, simple, multiline);
249 pp_newline (&pp);
250 pp_flush (&pp);
757bf1df
DM
251}
252
808f4dfe 253/* Dump a multiline representation of this model to stderr. */
757bf1df 254
808f4dfe
DM
255DEBUG_FUNCTION void
256region_model::dump (bool simple) const
257{
258 dump (stderr, simple, true);
259}
757bf1df 260
808f4dfe 261/* Dump a multiline representation of this model to stderr. */
757bf1df 262
808f4dfe
DM
263DEBUG_FUNCTION void
264region_model::debug () const
757bf1df 265{
808f4dfe 266 dump (true);
757bf1df
DM
267}
268
808f4dfe
DM
/* Canonicalize the store and constraints, to maximize the chance of
   equality between region_model instances.  */

void
region_model::canonicalize ()
{
  m_store.canonicalize (m_mgr->get_store_manager ());
  m_constraints->canonicalize ();
}
278
/* Return true if this region_model is in canonical form.
   Implemented by canonicalizing a copy and checking it compares
   equal to *this.  */

bool
region_model::canonicalized_p () const
{
  region_model copy (*this);
  copy.canonicalize ();
  return *this == copy;
}
288
808f4dfe
DM
/* See the comment for store::loop_replay_fixup.  Delegates to the
   store, passing DST_STATE's store as the target.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
296
757bf1df
DM
297/* A subclass of pending_diagnostic for complaining about uses of
298 poisoned values. */
299
300class poisoned_value_diagnostic
301: public pending_diagnostic_subclass<poisoned_value_diagnostic>
302{
303public:
304 poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
305 : m_expr (expr), m_pkind (pkind)
306 {}
307
308 const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }
309
310 bool operator== (const poisoned_value_diagnostic &other) const
311 {
312 return m_expr == other.m_expr;
313 }
314
315 bool emit (rich_location *rich_loc) FINAL OVERRIDE
316 {
317 switch (m_pkind)
318 {
319 default:
320 gcc_unreachable ();
757bf1df
DM
321 case POISON_KIND_FREED:
322 {
323 diagnostic_metadata m;
324 m.add_cwe (416); /* "CWE-416: Use After Free". */
6c8e5844
DM
325 return warning_meta (rich_loc, m,
326 OPT_Wanalyzer_use_after_free,
327 "use after %<free%> of %qE",
328 m_expr);
757bf1df
DM
329 }
330 break;
331 case POISON_KIND_POPPED_STACK:
332 {
757bf1df 333 /* TODO: which CWE? */
808f4dfe
DM
334 return warning_at
335 (rich_loc,
336 OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
337 "dereferencing pointer %qE to within stale stack frame",
338 m_expr);
757bf1df
DM
339 }
340 break;
341 }
342 }
343
344 label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
345 {
346 switch (m_pkind)
347 {
348 default:
349 gcc_unreachable ();
757bf1df
DM
350 case POISON_KIND_FREED:
351 return ev.formatted_print ("use after %<free%> of %qE here",
352 m_expr);
353 case POISON_KIND_POPPED_STACK:
354 return ev.formatted_print
808f4dfe 355 ("dereferencing pointer %qE to within stale stack frame",
757bf1df
DM
356 m_expr);
357 }
358 }
359
360private:
361 tree m_expr;
362 enum poison_kind m_pkind;
363};
364
808f4dfe
DM
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL (on_assignment then falls back to its own
   handling, e.g. for CONSTRUCTOR and STRING_CST).  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	/* Comparisons: try to evaluate the condition against the
	   constraints; fold to a boolean constant when its truth is
	   known, otherwise build a symbolic binop.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	// TODO: constraints between svalues
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	if (t.is_known ())
	  return get_rvalue (t.is_true ()
			     ? boolean_true_node
			     : boolean_false_node,
			     ctxt);
	else
	  {
	    // TODO: symbolic value for binop
	    const svalue *sval_binop
	      = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					    rhs1_sval, rhs2_sval);
	    return sval_binop;
	  }
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
548
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* The sorry_at is compiled out ("if (0)") — unhandled ops are
	   deliberately modelled as unknown rather than rejected.  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		/* Elements without an explicit index are positional.  */
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	/* Add a default binding, rather than a direct one, so that array
	   access will "inherit" the individual chars.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   BK_default);
      }
      break;
    }
}
631
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  bool unknown_side_effects = false;

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);

      /* The various impl_call_* member functions are implemented
	 in region-model-impl-calls.cc.
	 Having them split out into separate functions makes it easier
	 to put breakpoints on the handling of specific functions.  */
      if (is_named_call_p (callee_fndecl, "malloc", call, 1))
	return impl_call_malloc (cd);
      else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
	return impl_call_calloc (cd);
      else if (is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
	return impl_call_alloca (cd);
      else if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
	       || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
	       || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
	return impl_call_builtin_expect (cd);
      else if (is_named_call_p (callee_fndecl, "memset", call, 3)
	       || gimple_call_builtin_p (call, BUILT_IN_MEMSET))
	{
	  impl_call_memset (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "strlen", call, 1))
	{
	  /* If impl_call_strlen couldn't model the call, fall through
	     to the generic lhs handling below.  */
	  if (impl_call_strlen (cd))
	    return false;
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && !DECL_PURE_P (callee_fndecl)
	       && !fndecl_built_in_p (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  /* Some of the above cases update the lhs of the call based on the
     return value.  If we get here, it hasn't been done yet, so do that
     now.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  const svalue *sval = m_mgr->get_or_create_initial_value (lhs_region);
	  set_value (lhs_region, sval, ctxt);
	}
    }

  return unknown_side_effects;
}
700
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  /* "free" is handled post-sm-update so that the sm-state machinery
     sees the pointer before its region is marked as freed.  */
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    if (is_named_call_p (callee_fndecl, "free", call, 1))
      {
	call_details cd (call, this, ctxt);
	impl_call_free (cd);
	return;
      }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
727
ef7827b0
DM
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  reachable_regions reachable_regs (&m_store, m_mgr);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.
     NOTE(review): CTXT is dereferenced unconditionally here —
     presumably callers always pass a non-NULL context on this path;
     confirm.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      ctxt->on_unknown_change (sval, true);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters ();

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());
}
ef7827b0 797
808f4dfe
DM
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval)
{
  reachable_regions reachable_regs (&m_store, m_mgr);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Get regions for locals that have explicitly bound values.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
837
/* Update this model for the RETURN_STMT, using CTXT to report any
   diagnostics.  Copies the returned value (if any) into the
   function's DECL_RESULT region.  */

void
region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
{
  tree callee = get_current_function ()->decl;
  tree lhs = DECL_RESULT (callee);
  tree rhs = gimple_return_retval (return_stmt);

  /* "return;" with no value, or a function with no DECL_RESULT,
     needs no model update.  */
  if (lhs && rhs)
    copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt);
}
851
342e14ff
DM
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/sigsetjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  /* The first argument is the jmp_buf; dereference it to get the
     region it points to.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      tree zero = build_int_cst (TREE_TYPE (lhs), 0);
      const svalue *new_sval = m_mgr->get_or_create_constant_svalue (zero);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
885
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".
     This must happen before the frames are popped below.  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      tree t_zero = build_int_cst (TREE_TYPE (fake_retval), 0);
      const svalue *zero_sval = m_mgr->get_or_create_constant_svalue (t_zero);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  tree t_one = build_int_cst (TREE_TYPE (fake_retval), 1);
	  const svalue *one_sval
	    = m_mgr->get_or_create_constant_svalue (t_one);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph betweeen the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
939
/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is for the appropriate edge.
   Notifies CTXT (if non-NULL) via on_phi.  */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names.  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  set_value (get_lvalue (lhs, ctxt), rhs_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
962
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt)
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      /* Unhandled tree codes get a placeholder region rather than
	 an ICE.  */
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case MEM_REF:
      {
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	/* Locals are looked up within the frame identified by the
	   path_var's stack depth, not necessarily the current frame.  */
	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
1049
1050/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op. */
1051
09bea584
DM
1052static void
1053assert_compat_types (tree src_type, tree dst_type)
1054{
1055 if (src_type && dst_type && !VOID_TYPE_P (dst_type))
808f4dfe
DM
1056 {
1057#if CHECKING_P
1058 if (!(useless_type_conversion_p (src_type, dst_type)))
1059 internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
1060#endif
1061 }
09bea584 1062}
757bf1df 1063
808f4dfe 1064/* Get the region for PV within this region_model,
757bf1df
DM
1065 emitting any diagnostics to CTXT. */
1066
808f4dfe 1067const region *
757bf1df
DM
1068region_model::get_lvalue (path_var pv, region_model_context *ctxt)
1069{
1070 if (pv.m_tree == NULL_TREE)
808f4dfe 1071 return NULL;
757bf1df 1072
808f4dfe
DM
1073 const region *result_reg = get_lvalue_1 (pv, ctxt);
1074 assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
1075 return result_reg;
757bf1df
DM
1076}
1077
808f4dfe 1078/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
1079 recent stack frame if it's a local). */
1080
808f4dfe 1081const region *
757bf1df
DM
1082region_model::get_lvalue (tree expr, region_model_context *ctxt)
1083{
1084 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1085}
1086
1087/* Implementation of region_model::get_rvalue; the latter adds type-checking.
1088
1089 Get the value of PV within this region_model,
1090 emitting any diagnostics to CTXT. */
1091
808f4dfe 1092const svalue *
757bf1df
DM
1093region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt)
1094{
1095 gcc_assert (pv.m_tree);
1096
1097 switch (TREE_CODE (pv.m_tree))
1098 {
1099 default:
2242b975 1100 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
757bf1df
DM
1101
1102 case ADDR_EXPR:
1103 {
1104 /* "&EXPR". */
1105 tree expr = pv.m_tree;
1106 tree op0 = TREE_OPERAND (expr, 0);
808f4dfe
DM
1107 const region *expr_reg = get_lvalue (op0, ctxt);
1108 return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
757bf1df
DM
1109 }
1110 break;
1111
808f4dfe
DM
1112 case BIT_FIELD_REF:
1113 return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));
1114
1115 case SSA_NAME:
1116 case VAR_DECL:
1117 case PARM_DECL:
1118 case RESULT_DECL:
757bf1df
DM
1119 case ARRAY_REF:
1120 {
da7c2773
DM
1121 const region *reg = get_lvalue (pv, ctxt);
1122 return get_store_value (reg);
757bf1df
DM
1123 }
1124
808f4dfe
DM
1125 case REALPART_EXPR:
1126 case IMAGPART_EXPR:
1127 case VIEW_CONVERT_EXPR:
1128 {
1129 tree expr = pv.m_tree;
1130 tree arg = TREE_OPERAND (expr, 0);
1131 const svalue *arg_sval = get_rvalue (arg, ctxt);
1132 const svalue *sval_unaryop
1133 = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
1134 arg_sval);
1135 return sval_unaryop;
1136 };
1137
757bf1df
DM
1138 case INTEGER_CST:
1139 case REAL_CST:
808f4dfe
DM
1140 case COMPLEX_CST:
1141 case VECTOR_CST:
757bf1df 1142 case STRING_CST:
808f4dfe
DM
1143 return m_mgr->get_or_create_constant_svalue (pv.m_tree);
1144
1145 case POINTER_PLUS_EXPR:
1146 {
1147 tree expr = pv.m_tree;
1148 tree ptr = TREE_OPERAND (expr, 0);
1149 tree offset = TREE_OPERAND (expr, 1);
1150 const svalue *ptr_sval = get_rvalue (ptr, ctxt);
1151 const svalue *offset_sval = get_rvalue (offset, ctxt);
1152 const svalue *sval_binop
1153 = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
1154 ptr_sval, offset_sval);
1155 return sval_binop;
1156 }
1157
1158 /* Binary ops. */
1159 case PLUS_EXPR:
1160 case MULT_EXPR:
1161 {
1162 tree expr = pv.m_tree;
1163 tree arg0 = TREE_OPERAND (expr, 0);
1164 tree arg1 = TREE_OPERAND (expr, 1);
1165 const svalue *arg0_sval = get_rvalue (arg0, ctxt);
1166 const svalue *arg1_sval = get_rvalue (arg1, ctxt);
1167 const svalue *sval_binop
1168 = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
1169 arg0_sval, arg1_sval);
1170 return sval_binop;
1171 }
757bf1df
DM
1172
1173 case COMPONENT_REF:
1174 case MEM_REF:
757bf1df 1175 {
808f4dfe
DM
1176 const region *ref_reg = get_lvalue (pv, ctxt);
1177 return get_store_value (ref_reg);
757bf1df
DM
1178 }
1179 }
1180}
1181
1182/* Get the value of PV within this region_model,
1183 emitting any diagnostics to CTXT. */
1184
808f4dfe 1185const svalue *
757bf1df
DM
1186region_model::get_rvalue (path_var pv, region_model_context *ctxt)
1187{
1188 if (pv.m_tree == NULL_TREE)
808f4dfe 1189 return NULL;
757bf1df 1190
808f4dfe 1191 const svalue *result_sval = get_rvalue_1 (pv, ctxt);
757bf1df 1192
808f4dfe
DM
1193 assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));
1194
1195 return result_sval;
757bf1df
DM
1196}
1197
1198/* Get the value of EXPR within this region_model (assuming the most
1199 recent stack frame if it's a local). */
1200
808f4dfe 1201const svalue *
757bf1df
DM
1202region_model::get_rvalue (tree expr, region_model_context *ctxt)
1203{
1204 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1205}
1206
623bc027
DM
1207/* Return true if this model is on a path with "main" as the entrypoint
1208 (as opposed to one in which we're merely analyzing a subset of the
1209 path through the code). */
1210
1211bool
1212region_model::called_from_main_p () const
1213{
1214 if (!m_current_frame)
1215 return false;
1216 /* Determine if the oldest stack frame in this model is for "main". */
1217 const frame_region *frame0 = get_frame_at_index (0);
1218 gcc_assert (frame0);
1219 return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
1220}
1221
1222/* Subroutine of region_model::get_store_value for when REG is (or is within)
1223 a global variable that hasn't been touched since the start of this path
1224 (or was implicitly touched due to a call to an unknown function). */
1225
1226const svalue *
1227region_model::get_initial_value_for_global (const region *reg) const
1228{
1229 /* Get the decl that REG is for (or is within). */
1230 const decl_region *base_reg
1231 = reg->get_base_region ()->dyn_cast_decl_region ();
1232 gcc_assert (base_reg);
1233 tree decl = base_reg->get_decl ();
1234
1235 /* Special-case: to avoid having to explicitly update all previously
1236 untracked globals when calling an unknown fn, they implicitly have
1237 an unknown value if an unknown call has occurred, unless this is
1238 static to-this-TU and hasn't escaped. Globals that have escaped
1239 are explicitly tracked, so we shouldn't hit this case for them. */
1240 if (m_store.called_unknown_fn_p () && TREE_PUBLIC (decl))
1241 return m_mgr->get_or_create_unknown_svalue (reg->get_type ());
1242
1243 /* If we are on a path from the entrypoint from "main" and we have a
1244 global decl defined in this TU that hasn't been touched yet, then
1245 the initial value of REG can be taken from the initialization value
1246 of the decl. */
1247 if (called_from_main_p () && !DECL_EXTERNAL (decl))
1248 {
1249 /* Get the initializer value for base_reg. */
1250 const svalue *base_reg_init
1251 = base_reg->get_svalue_for_initializer (m_mgr);
1252 gcc_assert (base_reg_init);
1253 if (reg == base_reg)
1254 return base_reg_init;
1255 else
1256 {
1257 /* Get the value for REG within base_reg_init. */
1258 binding_cluster c (base_reg);
1259 c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init,
1260 BK_direct);
1261 const svalue *sval
1262 = c.get_any_binding (m_mgr->get_store_manager (), reg);
1263 if (sval)
1264 {
1265 if (reg->get_type ())
1266 sval = m_mgr->get_or_create_cast (reg->get_type (),
1267 sval);
1268 return sval;
1269 }
1270 }
1271 }
1272
1273 /* Otherwise, return INIT_VAL(REG). */
1274 return m_mgr->get_or_create_initial_value (reg);
1275}
1276
808f4dfe
DM
1277/* Get a value for REG, looking it up in the store, or otherwise falling
1278 back to "initial" or "unknown" values. */
757bf1df 1279
808f4dfe
DM
1280const svalue *
1281region_model::get_store_value (const region *reg) const
757bf1df 1282{
2867118d
DM
1283 /* Special-case: handle var_decls in the constant pool. */
1284 if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
1285 if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
1286 return sval;
1287
808f4dfe
DM
1288 const svalue *sval
1289 = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
1290 if (sval)
757bf1df 1291 {
808f4dfe
DM
1292 if (reg->get_type ())
1293 sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
1294 return sval;
757bf1df 1295 }
757bf1df 1296
808f4dfe
DM
1297 /* Special-case: read at a constant index within a STRING_CST. */
1298 if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
1299 if (tree byte_offset_cst
1300 = offset_reg->get_byte_offset ()->maybe_get_constant ())
1301 if (const string_region *str_reg
1302 = reg->get_parent_region ()->dyn_cast_string_region ())
757bf1df 1303 {
808f4dfe
DM
1304 tree string_cst = str_reg->get_string_cst ();
1305 if (const svalue *char_sval
1306 = m_mgr->maybe_get_char_from_string_cst (string_cst,
1307 byte_offset_cst))
1308 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
757bf1df 1309 }
757bf1df 1310
808f4dfe
DM
1311 /* Special-case: read the initial char of a STRING_CST. */
1312 if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
1313 if (const string_region *str_reg
1314 = cast_reg->get_original_region ()->dyn_cast_string_region ())
1315 {
1316 tree string_cst = str_reg->get_string_cst ();
1317 tree byte_offset_cst = build_int_cst (integer_type_node, 0);
1318 if (const svalue *char_sval
1319 = m_mgr->maybe_get_char_from_string_cst (string_cst,
1320 byte_offset_cst))
1321 return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
1322 }
757bf1df 1323
808f4dfe
DM
1324 /* Otherwise we implicitly have the initial value of the region
1325 (if the cluster had been touched, binding_cluster::get_any_binding,
1326 would have returned UNKNOWN, and we would already have returned
1327 that above). */
757bf1df 1328
623bc027
DM
1329 /* Handle globals. */
1330 if (reg->get_base_region ()->get_parent_region ()->get_kind ()
1331 == RK_GLOBALS)
1332 return get_initial_value_for_global (reg);
757bf1df 1333
808f4dfe 1334 return m_mgr->get_or_create_initial_value (reg);
757bf1df
DM
1335}
1336
808f4dfe
DM
1337/* Return false if REG does not exist, true if it may do.
1338 This is for detecting regions within the stack that don't exist anymore
1339 after frames are popped. */
757bf1df 1340
808f4dfe
DM
1341bool
1342region_model::region_exists_p (const region *reg) const
757bf1df 1343{
808f4dfe
DM
1344 /* If within a stack frame, check that the stack frame is live. */
1345 if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
757bf1df 1346 {
808f4dfe
DM
1347 /* Check that the current frame is the enclosing frame, or is called
1348 by it. */
1349 for (const frame_region *iter_frame = get_current_frame (); iter_frame;
1350 iter_frame = iter_frame->get_calling_frame ())
1351 if (iter_frame == enclosing_frame)
1352 return true;
1353 return false;
757bf1df 1354 }
808f4dfe
DM
1355
1356 return true;
757bf1df
DM
1357}
1358
808f4dfe
DM
1359/* Get a region for referencing PTR_SVAL, creating a region if need be, and
1360 potentially generating warnings via CTXT.
1361 PTR_TREE if non-NULL can be used when emitting diagnostics. */
757bf1df 1362
808f4dfe
DM
1363const region *
1364region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
1365 region_model_context *ctxt)
757bf1df 1366{
808f4dfe 1367 gcc_assert (ptr_sval);
757bf1df 1368
808f4dfe 1369 switch (ptr_sval->get_kind ())
757bf1df 1370 {
808f4dfe 1371 default:
23ebfda0 1372 break;
808f4dfe 1373
757bf1df
DM
1374 case SK_REGION:
1375 {
808f4dfe
DM
1376 const region_svalue *region_sval
1377 = as_a <const region_svalue *> (ptr_sval);
757bf1df
DM
1378 return region_sval->get_pointee ();
1379 }
1380
808f4dfe
DM
1381 case SK_BINOP:
1382 {
1383 const binop_svalue *binop_sval
1384 = as_a <const binop_svalue *> (ptr_sval);
1385 switch (binop_sval->get_op ())
1386 {
1387 case POINTER_PLUS_EXPR:
1388 {
1389 /* If we have a symbolic value expressing pointer arithmentic,
1390 try to convert it to a suitable region. */
1391 const region *parent_region
1392 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
1393 const svalue *offset = binop_sval->get_arg1 ();
1394 tree type= TREE_TYPE (ptr_sval->get_type ());
1395 return m_mgr->get_offset_region (parent_region, type, offset);
1396 }
1397 default:
23ebfda0 1398 break;
808f4dfe
DM
1399 }
1400 }
23ebfda0 1401 break;
757bf1df
DM
1402
1403 case SK_POISONED:
1404 {
1405 if (ctxt)
808f4dfe
DM
1406 {
1407 tree ptr = get_representative_tree (ptr_sval);
1408 /* If we can't get a representative tree for PTR_SVAL
1409 (e.g. if it hasn't been bound into the store), then
1410 fall back on PTR_TREE, if non-NULL. */
1411 if (!ptr)
1412 ptr = ptr_tree;
1413 if (ptr)
1414 {
1415 const poisoned_svalue *poisoned_sval
1416 = as_a <const poisoned_svalue *> (ptr_sval);
1417 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1418 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
1419 }
1420 }
757bf1df 1421 }
23ebfda0 1422 break;
757bf1df
DM
1423 }
1424
23ebfda0 1425 return m_mgr->get_symbolic_region (ptr_sval);
757bf1df
DM
1426}
1427
808f4dfe
DM
1428/* Set the value of the region given by LHS_REG to the value given
1429 by RHS_SVAL. */
757bf1df 1430
808f4dfe
DM
1431void
1432region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
1433 region_model_context */*ctxt*/)
757bf1df 1434{
808f4dfe
DM
1435 gcc_assert (lhs_reg);
1436 gcc_assert (rhs_sval);
1437
1438 m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
1439 BK_direct);
757bf1df
DM
1440}
1441
808f4dfe 1442/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
1443
1444void
808f4dfe 1445region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 1446{
808f4dfe
DM
1447 const region *lhs_reg = get_lvalue (lhs, ctxt);
1448 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
1449 gcc_assert (lhs_reg);
1450 gcc_assert (rhs_sval);
1451 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
1452}
1453
808f4dfe 1454/* Remove all bindings overlapping REG within the store. */
884d9141
DM
1455
1456void
808f4dfe
DM
1457region_model::clobber_region (const region *reg)
1458{
1459 m_store.clobber_region (m_mgr->get_store_manager(), reg);
1460}
1461
1462/* Remove any bindings for REG within the store. */
1463
1464void
1465region_model::purge_region (const region *reg)
1466{
1467 m_store.purge_region (m_mgr->get_store_manager(), reg);
1468}
1469
1470/* Zero-fill REG. */
1471
1472void
1473region_model::zero_fill_region (const region *reg)
1474{
1475 m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
1476}
1477
1478/* Mark REG as having unknown content. */
1479
1480void
1481region_model::mark_region_as_unknown (const region *reg)
884d9141 1482{
808f4dfe 1483 m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg);
884d9141
DM
1484}
1485
808f4dfe 1486/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
1487 this model. */
1488
1489tristate
808f4dfe
DM
1490region_model::eval_condition (const svalue *lhs,
1491 enum tree_code op,
1492 const svalue *rhs) const
757bf1df 1493{
e978955d
DM
1494 /* For now, make no attempt to capture constraints on floating-point
1495 values. */
1496 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
1497 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
1498 return tristate::unknown ();
1499
808f4dfe 1500 tristate ts = eval_condition_without_cm (lhs, op, rhs);
757bf1df
DM
1501 if (ts.is_known ())
1502 return ts;
1503
1504 /* Otherwise, try constraints. */
808f4dfe 1505 return m_constraints->eval_condition (lhs, op, rhs);
757bf1df
DM
1506}
1507
808f4dfe 1508/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
757bf1df
DM
1509 this model, without resorting to the constraint_manager.
1510
1511 This is exposed so that impl_region_model_context::on_state_leak can
1512 check for equality part-way through region_model::purge_unused_svalues
1513 without risking creating new ECs. */
1514
1515tristate
808f4dfe
DM
1516region_model::eval_condition_without_cm (const svalue *lhs,
1517 enum tree_code op,
1518 const svalue *rhs) const
757bf1df 1519{
757bf1df
DM
1520 gcc_assert (lhs);
1521 gcc_assert (rhs);
1522
1523 /* See what we know based on the values. */
808f4dfe
DM
1524
1525 /* For now, make no attempt to capture constraints on floating-point
1526 values. */
1527 if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
1528 || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
1529 return tristate::unknown ();
1530
1531 /* Unwrap any unmergeable values. */
1532 lhs = lhs->unwrap_any_unmergeable ();
1533 rhs = rhs->unwrap_any_unmergeable ();
1534
1535 if (lhs == rhs)
757bf1df 1536 {
808f4dfe
DM
1537 /* If we have the same svalue, then we have equality
1538 (apart from NaN-handling).
1539 TODO: should this definitely be the case for poisoned values? */
1540 /* Poisoned and unknown values are "unknowable". */
1541 if (lhs->get_kind () == SK_POISONED
1542 || lhs->get_kind () == SK_UNKNOWN)
1543 return tristate::TS_UNKNOWN;
e978955d 1544
808f4dfe 1545 switch (op)
757bf1df 1546 {
808f4dfe
DM
1547 case EQ_EXPR:
1548 case GE_EXPR:
1549 case LE_EXPR:
1550 return tristate::TS_TRUE;
07c86323 1551
808f4dfe
DM
1552 case NE_EXPR:
1553 case GT_EXPR:
1554 case LT_EXPR:
1555 return tristate::TS_FALSE;
1556
1557 default:
1558 /* For other ops, use the logic below. */
1559 break;
757bf1df 1560 }
808f4dfe 1561 }
757bf1df 1562
808f4dfe
DM
1563 /* If we have a pair of region_svalues, compare them. */
1564 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
1565 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
1566 {
1567 tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
1568 if (res.is_known ())
1569 return res;
1570 /* Otherwise, only known through constraints. */
1571 }
757bf1df 1572
808f4dfe
DM
1573 /* If we have a pair of constants, compare them. */
1574 if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
1575 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
1576 return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);
757bf1df 1577
808f4dfe
DM
1578 /* Handle comparison of a region_svalue against zero. */
1579
1580 if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
1581 if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
1582 if (zerop (cst_rhs->get_constant ()))
1583 {
1584 /* A region_svalue is a non-NULL pointer, except in certain
1585 special cases (see the comment for region::non_null_p. */
1586 const region *pointee = ptr->get_pointee ();
1587 if (pointee->non_null_p ())
757bf1df 1588 {
808f4dfe 1589 switch (op)
757bf1df 1590 {
808f4dfe
DM
1591 default:
1592 gcc_unreachable ();
1593
1594 case EQ_EXPR:
1595 case GE_EXPR:
1596 case LE_EXPR:
1597 return tristate::TS_FALSE;
1598
1599 case NE_EXPR:
1600 case GT_EXPR:
1601 case LT_EXPR:
1602 return tristate::TS_TRUE;
757bf1df
DM
1603 }
1604 }
808f4dfe
DM
1605 }
1606
1607 /* Handle rejection of equality for comparisons of the initial values of
1608 "external" values (such as params) with the address of locals. */
1609 if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
1610 if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
1611 {
1612 tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
1613 if (res.is_known ())
1614 return res;
1615 }
1616 if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
1617 if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
1618 {
1619 tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
1620 if (res.is_known ())
1621 return res;
1622 }
1623
1624 if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
1625 if (tree rhs_cst = rhs->maybe_get_constant ())
1626 {
1627 tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
1628 if (res.is_known ())
1629 return res;
1630 }
1631
1632 return tristate::TS_UNKNOWN;
1633}
1634
1635/* Subroutine of region_model::eval_condition_without_cm, for rejecting
1636 equality of INIT_VAL(PARM) with &LOCAL. */
1637
1638tristate
1639region_model::compare_initial_and_pointer (const initial_svalue *init,
1640 const region_svalue *ptr) const
1641{
1642 const region *pointee = ptr->get_pointee ();
1643
1644 /* If we have a pointer to something within a stack frame, it can't be the
1645 initial value of a param. */
1646 if (pointee->maybe_get_frame_region ())
1647 {
1648 const region *reg = init->get_region ();
1649 if (tree reg_decl = reg->maybe_get_decl ())
1650 if (TREE_CODE (reg_decl) == SSA_NAME)
1651 {
1652 tree ssa_name = reg_decl;
1653 if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
1654 && SSA_NAME_VAR (ssa_name)
1655 && TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
1656 return tristate::TS_FALSE;
1657 }
757bf1df
DM
1658 }
1659
1660 return tristate::TS_UNKNOWN;
1661}
1662
1663/* Attempt to add the constraint "LHS OP RHS" to this region_model.
1664 If it is consistent with existing constraints, add it, and return true.
1665 Return false if it contradicts existing constraints.
1666 Use CTXT for reporting any diagnostics associated with the accesses. */
1667
1668bool
1669region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
1670 region_model_context *ctxt)
1671{
e978955d
DM
1672 /* For now, make no attempt to capture constraints on floating-point
1673 values. */
1674 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
1675 return true;
1676
808f4dfe
DM
1677 const svalue *lhs_sval = get_rvalue (lhs, ctxt);
1678 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
757bf1df 1679
808f4dfe 1680 tristate t_cond = eval_condition (lhs_sval, op, rhs_sval);
757bf1df
DM
1681
1682 /* If we already have the condition, do nothing. */
1683 if (t_cond.is_true ())
1684 return true;
1685
1686 /* Reject a constraint that would contradict existing knowledge, as
1687 unsatisfiable. */
1688 if (t_cond.is_false ())
1689 return false;
1690
1691 /* Store the constraint. */
808f4dfe 1692 m_constraints->add_constraint (lhs_sval, op, rhs_sval);
757bf1df
DM
1693
1694 add_any_constraints_from_ssa_def_stmt (lhs, op, rhs, ctxt);
1695
1696 /* Notify the context, if any. This exists so that the state machines
1697 in a program_state can be notified about the condition, and so can
1698 set sm-state for e.g. unchecked->checked, both for cfg-edges, and
1699 when synthesizing constraints as above. */
1700 if (ctxt)
1701 ctxt->on_condition (lhs, op, rhs);
1702
1703 return true;
1704}
1705
1706/* Subroutine of region_model::add_constraint for handling optimized
1707 && and || conditionals.
1708
1709 If we have an SSA_NAME for a boolean compared against 0,
1710 look at anything implied by the def stmt and call add_constraint
1711 for it (which could recurse).
1712
1713 For example, if we have
1714 _1 = p_6 == 0B;
1715 _2 = p_8 == 0B
1716 _3 = _1 | _2
1717 and add the constraint
1718 (_3 == 0),
1719 then the def stmt for _3 implies that _1 and _2 are both false,
1720 and hence we can add the constraints:
1721 p_6 != 0B
1722 p_8 != 0B. */
1723
1724void
1725region_model::add_any_constraints_from_ssa_def_stmt (tree lhs,
1726 enum tree_code op,
1727 tree rhs,
1728 region_model_context *ctxt)
1729{
1730 if (TREE_CODE (lhs) != SSA_NAME)
1731 return;
1732
e516294a 1733 if (!zerop (rhs))
757bf1df
DM
1734 return;
1735
1736 if (op != NE_EXPR && op != EQ_EXPR)
1737 return;
1738
e516294a
DM
1739 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
1740 if (const gassign *assign = dyn_cast<gassign *> (def_stmt))
1741 add_any_constraints_from_gassign (op, rhs, assign, ctxt);
1742 else if (gcall *call = dyn_cast<gcall *> (def_stmt))
1743 add_any_constraints_from_gcall (op, rhs, call, ctxt);
1744}
1745
1746/* Add any constraints for an SSA_NAME defined by ASSIGN
1747 where the result OP RHS. */
1748
1749void
1750region_model::add_any_constraints_from_gassign (enum tree_code op,
1751 tree rhs,
1752 const gassign *assign,
1753 region_model_context *ctxt)
1754{
757bf1df
DM
1755 /* We have either
1756 - "LHS != false" (i.e. LHS is true), or
1757 - "LHS == false" (i.e. LHS is false). */
1758 bool is_true = op == NE_EXPR;
1759
757bf1df
DM
1760 enum tree_code rhs_code = gimple_assign_rhs_code (assign);
1761
1762 switch (rhs_code)
1763 {
1764 default:
1765 break;
e516294a
DM
1766
1767 case NOP_EXPR:
1768 {
1769 add_constraint (gimple_assign_rhs1 (assign), op, rhs, ctxt);
1770 }
1771 break;
1772
757bf1df
DM
1773 case BIT_AND_EXPR:
1774 {
1775 if (is_true)
1776 {
1777 /* ...and "LHS == (rhs1 & rhs2) i.e. "(rhs1 & rhs2)" is true
1778 then both rhs1 and rhs2 must be true. */
1779 tree rhs1 = gimple_assign_rhs1 (assign);
1780 tree rhs2 = gimple_assign_rhs2 (assign);
1781 add_constraint (rhs1, NE_EXPR, boolean_false_node, ctxt);
1782 add_constraint (rhs2, NE_EXPR, boolean_false_node, ctxt);
1783 }
1784 }
1785 break;
1786
1787 case BIT_IOR_EXPR:
1788 {
1789 if (!is_true)
1790 {
1791 /* ...and "LHS == (rhs1 | rhs2)
1792 i.e. "(rhs1 | rhs2)" is false
1793 then both rhs1 and rhs2 must be false. */
1794 tree rhs1 = gimple_assign_rhs1 (assign);
1795 tree rhs2 = gimple_assign_rhs2 (assign);
1796 add_constraint (rhs1, EQ_EXPR, boolean_false_node, ctxt);
1797 add_constraint (rhs2, EQ_EXPR, boolean_false_node, ctxt);
1798 }
1799 }
1800 break;
1801
1802 case EQ_EXPR:
1803 case NE_EXPR:
1804 {
1805 /* ...and "LHS == (rhs1 OP rhs2)"
1806 then rhs1 OP rhs2 must have the same logical value as LHS. */
1807 tree rhs1 = gimple_assign_rhs1 (assign);
1808 tree rhs2 = gimple_assign_rhs2 (assign);
1809 if (!is_true)
1810 rhs_code
1811 = invert_tree_comparison (rhs_code, false /* honor_nans */);
1812 add_constraint (rhs1, rhs_code, rhs2, ctxt);
1813 }
1814 break;
1815 }
1816}
1817
e516294a
DM
1818/* Add any constraints for an SSA_NAME defined by CALL
1819 where the result OP RHS. */
1820
1821void
1822region_model::add_any_constraints_from_gcall (enum tree_code op,
1823 tree rhs,
1824 const gcall *call,
1825 region_model_context *ctxt)
1826{
1827 if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
1828 || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
1829 || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
1830 {
1831 /* __builtin_expect's return value is its initial argument. */
1832 add_constraint (gimple_call_arg (call, 0), op, rhs, ctxt);
1833 }
1834}
1835
757bf1df
DM
1836/* Determine what is known about the condition "LHS OP RHS" within
1837 this model.
1838 Use CTXT for reporting any diagnostics associated with the accesses. */
1839
1840tristate
1841region_model::eval_condition (tree lhs,
1842 enum tree_code op,
1843 tree rhs,
1844 region_model_context *ctxt)
1845{
e978955d
DM
1846 /* For now, make no attempt to model constraints on floating-point
1847 values. */
1848 if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
1849 return tristate::unknown ();
1850
757bf1df
DM
1851 return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
1852}
1853
808f4dfe
DM
1854/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
1855 Use VISITED to prevent infinite mutual recursion with the overload for
1856 regions. */
757bf1df 1857
808f4dfe
DM
1858path_var
1859region_model::get_representative_path_var (const svalue *sval,
1860 svalue_set *visited) const
757bf1df 1861{
808f4dfe
DM
1862 if (sval == NULL)
1863 return path_var (NULL_TREE, 0);
757bf1df 1864
808f4dfe
DM
1865 if (const svalue *cast_sval = sval->maybe_undo_cast ())
1866 sval = cast_sval;
757bf1df 1867
808f4dfe
DM
1868 /* Prevent infinite recursion. */
1869 if (visited->contains (sval))
1870 return path_var (NULL_TREE, 0);
1871 visited->add (sval);
757bf1df 1872
808f4dfe
DM
1873 auto_vec<path_var> pvs;
1874 m_store.get_representative_path_vars (this, visited, sval, &pvs);
757bf1df 1875
808f4dfe
DM
1876 if (tree cst = sval->maybe_get_constant ())
1877 pvs.safe_push (path_var (cst, 0));
757bf1df 1878
90f7c300 1879 /* Handle string literals and various other pointers. */
808f4dfe
DM
1880 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
1881 {
1882 const region *reg = ptr_sval->get_pointee ();
1883 if (path_var pv = get_representative_path_var (reg, visited))
1884 return path_var (build1 (ADDR_EXPR,
1885 TREE_TYPE (sval->get_type ()),
1886 pv.m_tree),
1887 pv.m_stack_depth);
1888 }
1889
1890 /* If we have a sub_svalue, look for ways to represent the parent. */
1891 if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
90f7c300 1892 {
808f4dfe
DM
1893 const svalue *parent_sval = sub_sval->get_parent ();
1894 const region *subreg = sub_sval->get_subregion ();
1895 if (path_var parent_pv
1896 = get_representative_path_var (parent_sval, visited))
1897 if (const field_region *field_reg = subreg->dyn_cast_field_region ())
1898 return path_var (build3 (COMPONENT_REF,
1899 sval->get_type (),
1900 parent_pv.m_tree,
1901 field_reg->get_field (),
1902 NULL_TREE),
1903 parent_pv.m_stack_depth);
90f7c300
DM
1904 }
1905
808f4dfe
DM
1906 if (pvs.length () < 1)
1907 return path_var (NULL_TREE, 0);
1908
1909 pvs.qsort (readability_comparator);
1910 return pvs[0];
757bf1df
DM
1911}
1912
808f4dfe 1913/* Attempt to return a tree that represents SVAL, or return NULL_TREE. */
757bf1df 1914
808f4dfe
DM
1915tree
1916region_model::get_representative_tree (const svalue *sval) const
757bf1df 1917{
808f4dfe
DM
1918 svalue_set visited;
1919 return get_representative_path_var (sval, &visited).m_tree;
1920}
1921
/* Attempt to return a path_var that represents REG, or return
   the NULL path_var.
   For example, a region for a field of a local would be a path_var
   wrapping a COMPONENT_REF.
   Use VISITED to prevent infinite mutual recursion with the overload for
   svalues.  */

path_var
region_model::get_representative_path_var (const region *reg,
					   svalue_set *visited) const
{
  switch (reg->get_kind ())
    {
    default:
      gcc_unreachable ();

    case RK_FRAME:
    case RK_GLOBALS:
    case RK_CODE:
    case RK_HEAP:
    case RK_STACK:
    case RK_ROOT:
      /* Regions that represent memory spaces are not expressible as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_FUNCTION:
      {
	/* A function region is represented by its FUNCTION_DECL; depth 0
	   since functions aren't tied to any stack frame.  */
	const function_region *function_reg
	  = as_a <const function_region *> (reg);
	return path_var (function_reg->get_fndecl (), 0);
      }
    case RK_LABEL:
      gcc_unreachable (); // TODO

    case RK_SYMBOLIC:
      {
	/* A region pointed to by some svalue: express it as a MEM_REF
	   ("*PTR") with a zero offset, provided the pointer itself is
	   representable.  */
	const symbolic_region *symbolic_reg
	  = as_a <const symbolic_region *> (reg);
	const svalue *pointer = symbolic_reg->get_pointer ();
	path_var pointer_pv = get_representative_path_var (pointer, visited);
	if (!pointer_pv)
	  return path_var (NULL_TREE, 0);
	tree offset = build_int_cst (pointer->get_type (), 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 pointer_pv.m_tree,
				 offset),
			 pointer_pv.m_stack_depth);
      }
    case RK_DECL:
      {
	/* A declaration is directly representable by its decl tree, at the
	   stack depth recorded for the region.  */
	const decl_region *decl_reg = as_a <const decl_region *> (reg);
	return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
      }
    case RK_FIELD:
      {
	/* Field access: build "PARENT.FIELD" as a COMPONENT_REF, if the
	   parent region can be represented.  */
	const field_region *field_reg = as_a <const field_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build3 (COMPONENT_REF,
				 reg->get_type (),
				 parent_pv.m_tree,
				 field_reg->get_field (),
				 NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_ELEMENT:
      {
	/* Array element: build "PARENT[INDEX]" as an ARRAY_REF, requiring
	   both the parent region and the index svalue to be
	   representable.  */
	const element_region *element_reg
	  = as_a <const element_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var index_pv
	  = get_representative_path_var (element_reg->get_index (), visited);
	if (!index_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build4 (ARRAY_REF,
				 reg->get_type (),
				 parent_pv.m_tree, index_pv.m_tree,
				 NULL_TREE, NULL_TREE),
			 parent_pv.m_stack_depth);
      }

    case RK_OFFSET:
      {
	/* Region at a byte offset within its parent: build a MEM_REF,
	   requiring both the parent and the offset svalue to be
	   representable.  */
	const offset_region *offset_reg
	  = as_a <const offset_region *> (reg);
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	path_var offset_pv
	  = get_representative_path_var (offset_reg->get_byte_offset (),
					 visited);
	if (!offset_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build2 (MEM_REF,
				 reg->get_type (),
				 parent_pv.m_tree, offset_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_CAST:
      {
	/* View of the parent region as another type: a NOP_EXPR cast of
	   the parent's representative tree.  */
	path_var parent_pv
	  = get_representative_path_var (reg->get_parent_region (), visited);
	if (!parent_pv)
	  return path_var (NULL_TREE, 0);
	return path_var (build1 (NOP_EXPR,
				 reg->get_type (),
				 parent_pv.m_tree),
			 parent_pv.m_stack_depth);
      }

    case RK_HEAP_ALLOCATED:
    case RK_ALLOCA:
      /* No good way to express heap-allocated/alloca regions as trees.  */
      return path_var (NULL_TREE, 0);

    case RK_STRING:
      {
	/* A string literal region: its STRING_CST, at depth 0.  */
	const string_region *string_reg = as_a <const string_region *> (reg);
	return path_var (string_reg->get_string_cst (), 0);
      }

    case RK_UNKNOWN:
      return path_var (NULL_TREE, 0);
    }
}
2056
2057/* Update this model for any phis in SNODE, assuming we came from
2058 LAST_CFG_SUPEREDGE. */
2059
2060void
2061region_model::update_for_phis (const supernode *snode,
2062 const cfg_superedge *last_cfg_superedge,
2063 region_model_context *ctxt)
2064{
2065 gcc_assert (last_cfg_superedge);
2066
2067 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
2068 !gsi_end_p (gpi); gsi_next (&gpi))
2069 {
2070 gphi *phi = gpi.phi ();
2071
2072 tree src = last_cfg_superedge->get_phi_arg (phi);
2073 tree lhs = gimple_phi_result (phi);
2074
2075 /* Update next_state based on phi. */
808f4dfe 2076 handle_phi (phi, lhs, src, ctxt);
757bf1df
DM
2077 }
2078}
2079
2080/* Attempt to update this model for taking EDGE (where the last statement
2081 was LAST_STMT), returning true if the edge can be taken, false
2082 otherwise.
2083
2084 For CFG superedges where LAST_STMT is a conditional or a switch
2085 statement, attempt to add the relevant conditions for EDGE to this
2086 model, returning true if they are feasible, or false if they are
2087 impossible.
2088
2089 For call superedges, push frame information and store arguments
2090 into parameters.
2091
2092 For return superedges, pop frame information and store return
2093 values into any lhs.
2094
2095 Rejection of call/return superedges happens elsewhere, in
2096 program_point::on_edge (i.e. based on program point, rather
2097 than program state). */
2098
2099bool
2100region_model::maybe_update_for_edge (const superedge &edge,
2101 const gimple *last_stmt,
2102 region_model_context *ctxt)
2103{
2104 /* Handle frame updates for interprocedural edges. */
2105 switch (edge.m_kind)
2106 {
2107 default:
2108 break;
2109
2110 case SUPEREDGE_CALL:
2111 {
2112 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
2113 update_for_call_superedge (*call_edge, ctxt);
2114 }
2115 break;
2116
2117 case SUPEREDGE_RETURN:
2118 {
2119 const return_superedge *return_edge
2120 = as_a <const return_superedge *> (&edge);
2121 update_for_return_superedge (*return_edge, ctxt);
2122 }
2123 break;
2124
2125 case SUPEREDGE_INTRAPROCEDURAL_CALL:
2126 {
2127 const callgraph_superedge *cg_sedge
2128 = as_a <const callgraph_superedge *> (&edge);
2129 update_for_call_summary (*cg_sedge, ctxt);
2130 }
2131 break;
2132 }
2133
2134 if (last_stmt == NULL)
2135 return true;
2136
2137 /* Apply any constraints for conditionals/switch statements. */
2138
2139 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
2140 {
2141 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
2142 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt);
2143 }
2144
2145 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
2146 {
2147 const switch_cfg_superedge *switch_sedge
2148 = as_a <const switch_cfg_superedge *> (&edge);
2149 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, ctxt);
2150 }
2151
2152 return true;
2153}
2154
2155/* Push a new frame_region on to the stack region.
2156 Populate the frame_region with child regions for the function call's
2157 parameters, using values from the arguments at the callsite in the
2158 caller's frame. */
2159
2160void
2161region_model::update_for_call_superedge (const call_superedge &call_edge,
2162 region_model_context *ctxt)
2163{
808f4dfe 2164 /* Build a vec of argument svalues, using the current top
757bf1df
DM
2165 frame for resolving tree expressions. */
2166 const gcall *call_stmt = call_edge.get_call_stmt ();
808f4dfe 2167 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
2168
2169 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
2170 {
2171 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 2172 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
2173 }
2174
808f4dfe 2175 push_frame (call_edge.get_callee_function (), &arg_svals, ctxt);
757bf1df
DM
2176}
2177
a96f1c38
DM
2178/* Pop the top-most frame_region from the stack, and copy the return
2179 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 2180 the call (if any). */
757bf1df
DM
2181void
2182region_model::update_for_return_superedge (const return_superedge &return_edge,
2183 region_model_context *ctxt)
2184{
a96f1c38 2185 /* Get the region for the result of the call, within the caller frame. */
808f4dfe 2186 const region *result_dst_reg = NULL;
757bf1df
DM
2187 const gcall *call_stmt = return_edge.get_call_stmt ();
2188 tree lhs = gimple_call_lhs (call_stmt);
2189 if (lhs)
a96f1c38
DM
2190 {
2191 /* Normally we access the top-level frame, which is:
808f4dfe 2192 path_var (expr, get_stack_depth () - 1)
a96f1c38 2193 whereas here we need the caller frame, hence "- 2" here. */
808f4dfe
DM
2194 gcc_assert (get_stack_depth () >= 2);
2195 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
a96f1c38
DM
2196 ctxt);
2197 }
2198
808f4dfe 2199 pop_frame (result_dst_reg, NULL, ctxt);
757bf1df
DM
2200}
2201
2202/* Update this region_model with a summary of the effect of calling
2203 and returning from CG_SEDGE.
2204
2205 TODO: Currently this is extremely simplistic: we merely set the
2206 return value to "unknown". A proper implementation would e.g. update
2207 sm-state, and presumably be reworked to support multiple outcomes. */
2208
2209void
2210region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
2211 region_model_context *ctxt)
2212{
2213 /* For now, set any return value to "unknown". */
2214 const gcall *call_stmt = cg_sedge.get_call_stmt ();
2215 tree lhs = gimple_call_lhs (call_stmt);
2216 if (lhs)
808f4dfe 2217 mark_region_as_unknown (get_lvalue (lhs, ctxt));
757bf1df
DM
2218
2219 // TODO: actually implement some kind of summary here
2220}
2221
2222/* Given a true or false edge guarded by conditional statement COND_STMT,
2223 determine appropriate constraints for the edge to be taken.
2224
2225 If they are feasible, add the constraints and return true.
2226
2227 Return false if the constraints contradict existing knowledge
2228 (and so the edge should not be taken). */
2229
2230bool
2231region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
2232 const gcond *cond_stmt,
2233 region_model_context *ctxt)
2234{
2235 ::edge cfg_edge = sedge.get_cfg_edge ();
2236 gcc_assert (cfg_edge != NULL);
2237 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
2238
2239 enum tree_code op = gimple_cond_code (cond_stmt);
2240 tree lhs = gimple_cond_lhs (cond_stmt);
2241 tree rhs = gimple_cond_rhs (cond_stmt);
2242 if (cfg_edge->flags & EDGE_FALSE_VALUE)
2243 op = invert_tree_comparison (op, false /* honor_nans */);
2244 return add_constraint (lhs, op, rhs, ctxt);
2245}
2246
/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
   for the edge to be taken.

   If they are feasible, add the constraints and return true.

   Return false if the constraints contradict existing knowledge
   (and so the edge should not be taken).  */

bool
region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
					     const gswitch *switch_stmt,
					     region_model_context *ctxt)
{
  tree index = gimple_switch_index (switch_stmt);
  tree case_label = edge.get_case_label ();
  gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR);
  tree lower_bound = CASE_LOW (case_label);
  tree upper_bound = CASE_HIGH (case_label);
  if (lower_bound)
    {
      if (upper_bound)
	{
	  /* Range.  */
	  if (!add_constraint (index, GE_EXPR, lower_bound, ctxt))
	    return false;
	  return add_constraint (index, LE_EXPR, upper_bound, ctxt);
	}
      else
	/* Single-value.  */
	return add_constraint (index, EQ_EXPR, lower_bound, ctxt);
    }
  else
    {
      /* The default case (no CASE_LOW).
	 Add exclusions based on the other cases.
	 NOTE(review): the loop starts at 1, presumably because label 0
	 is the default case itself — confirm against GIMPLE docs.  */
      for (unsigned other_idx = 1;
	   other_idx < gimple_switch_num_labels (switch_stmt);
	   other_idx++)
	{
	  tree other_label = gimple_switch_label (switch_stmt,
						  other_idx);
	  tree other_lower_bound = CASE_LOW (other_label);
	  tree other_upper_bound = CASE_HIGH (other_label);
	  /* Non-default labels are expected to have a CASE_LOW.  */
	  gcc_assert (other_lower_bound);
	  if (other_upper_bound)
	    {
	      /* Exclude this range-valued case.
		 For now, we just exclude the boundary values.
		 TODO: exclude the values within the region.  */
	      if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
		return false;
	      if (!add_constraint (index, NE_EXPR, other_upper_bound, ctxt))
		return false;
	    }
	  else
	    /* Exclude this single-valued case.  */
	    if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
	      return false;
	}
      return true;
    }
}
2309
808f4dfe
DM
2310/* For use with push_frame when handling a top-level call within the analysis.
2311 PARAM has a defined but unknown initial value.
2312 Anything it points to has escaped, since the calling context "knows"
2313 the pointer, and thus calls to unknown functions could read/write into
2314 the region. */
757bf1df
DM
2315
2316void
808f4dfe 2317region_model::on_top_level_param (tree param,
3a25f345 2318 region_model_context *ctxt)
757bf1df 2319{
808f4dfe 2320 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 2321 {
808f4dfe
DM
2322 const region *param_reg = get_lvalue (param, ctxt);
2323 const svalue *init_ptr_sval
2324 = m_mgr->get_or_create_initial_value (param_reg);
2325 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
2326 m_store.mark_as_escaped (pointee_reg);
5eae0ac7 2327 }
757bf1df
DM
2328}
2329
/* Update this region_model to reflect pushing a frame onto the stack
   for a call to FUN.

   If ARG_SVALS is non-NULL, use it to populate the parameters
   in the new frame.
   Otherwise, the params have their initial_svalues.

   Return the frame_region for the new frame.  */

const region *
region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
			  region_model_context *ctxt)
{
  /* The new frame becomes the current one; its parent is the old
     current frame.  */
  m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
  if (arg_svals)
    {
      /* Arguments supplied from a caller frame.  */
      tree fndecl = fun->decl;
      unsigned idx = 0;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm), ++idx)
	{
	  /* If there's a mismatching declaration, the call stmt might
	     not have enough args.  Handle this case by leaving the
	     rest of the params as uninitialized.  */
	  if (idx >= arg_svals->length ())
	    break;
	  const svalue *arg_sval = (*arg_svals)[idx];
	  const region *parm_reg = get_lvalue (iter_parm, ctxt);
	  set_value (parm_reg, arg_sval, ctxt);

	  /* Also do it for default SSA name (sharing the same value).  */
	  tree parm_default_ssa = ssa_default_def (fun, iter_parm);
	  if (parm_default_ssa)
	    {
	      const region *defssa_reg = get_lvalue (parm_default_ssa, ctxt);
	      set_value (defssa_reg, arg_sval, ctxt);
	    }
	}
    }
  else
    {
      /* Otherwise we have a top-level call within the analysis.  The params
	 have defined but unknown initial values.
	 Anything they point to has escaped.  */
      tree fndecl = fun->decl;
      for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
	   iter_parm = DECL_CHAIN (iter_parm))
	{
	  on_top_level_param (iter_parm, ctxt);
	  /* As above, also handle the param's default SSA name.  */
	  tree parm_default_ssa = ssa_default_def (fun, iter_parm);
	  if (parm_default_ssa)
	    on_top_level_param (parm_default_ssa, ctxt);
	}
    }

  return m_current_frame;
}
2388
808f4dfe
DM
2389/* Get the function of the top-most frame in this region_model's stack.
2390 There must be such a frame. */
757bf1df 2391
808f4dfe
DM
2392function *
2393region_model::get_current_function () const
757bf1df 2394{
808f4dfe
DM
2395 const frame_region *frame = get_current_frame ();
2396 gcc_assert (frame);
2397 return frame->get_function ();
757bf1df
DM
2398}
2399
/* Pop the topmost frame_region from this region_model's stack;

   If RESULT_DST_REG is non-null, copy any return value from the frame
   into RESULT_DST_REG's region.
   If OUT_RESULT is non-null, copy any return value from the frame
   into *OUT_RESULT.

   Purge the frame region and all its descendent regions.
   Convert any pointers that point into such regions into
   POISON_KIND_POPPED_STACK svalues.  */

void
region_model::pop_frame (const region *result_dst_reg,
			 const svalue **out_result,
			 region_model_context *ctxt)
{
  gcc_assert (m_current_frame);

  /* Evaluate the result, within the callee frame (i.e. before the pop,
     while the callee's bindings are still present).  */
  const frame_region *frame_reg = m_current_frame;
  tree fndecl = m_current_frame->get_function ()->decl;
  tree result = DECL_RESULT (fndecl);
  if (result && TREE_TYPE (result) != void_type_node)
    {
      if (result_dst_reg)
	{
	  /* Copy the result to RESULT_DST_REG.  */
	  copy_region (result_dst_reg,
		       get_lvalue (result, ctxt),
		       ctxt);
	}
      if (out_result)
	*out_result = get_rvalue (result, ctxt);
    }

  /* Pop the frame.  */
  m_current_frame = m_current_frame->get_calling_frame ();

  /* Purge the popped frame's bindings, and poison any dangling pointers
     into it.  */
  unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
}
2440
808f4dfe 2441/* Get the number of frames in this region_model's stack. */
757bf1df 2442
808f4dfe
DM
2443int
2444region_model::get_stack_depth () const
757bf1df 2445{
808f4dfe
DM
2446 const frame_region *frame = get_current_frame ();
2447 if (frame)
2448 return frame->get_stack_depth ();
2449 else
2450 return 0;
757bf1df
DM
2451}
2452
808f4dfe
DM
2453/* Get the frame_region with the given index within the stack.
2454 The frame_region must exist. */
757bf1df 2455
808f4dfe
DM
2456const frame_region *
2457region_model::get_frame_at_index (int index) const
757bf1df 2458{
808f4dfe
DM
2459 const frame_region *frame = get_current_frame ();
2460 gcc_assert (frame);
2461 gcc_assert (index >= 0);
2462 gcc_assert (index <= frame->get_index ());
2463 while (index != frame->get_index ())
2464 {
2465 frame = frame->get_calling_frame ();
2466 gcc_assert (frame);
2467 }
2468 return frame;
757bf1df
DM
2469}
2470
808f4dfe
DM
2471/* Unbind svalues for any regions in REG and below.
2472 Find any pointers to such regions; convert them to
2473 poisoned values of kind PKIND. */
757bf1df 2474
808f4dfe
DM
2475void
2476region_model::unbind_region_and_descendents (const region *reg,
2477 enum poison_kind pkind)
757bf1df 2478{
808f4dfe
DM
2479 /* Gather a set of base regions to be unbound. */
2480 hash_set<const region *> base_regs;
2481 for (store::cluster_map_t::iterator iter = m_store.begin ();
2482 iter != m_store.end (); ++iter)
757bf1df 2483 {
808f4dfe
DM
2484 const region *iter_base_reg = (*iter).first;
2485 if (iter_base_reg->descendent_of_p (reg))
2486 base_regs.add (iter_base_reg);
757bf1df 2487 }
808f4dfe
DM
2488 for (hash_set<const region *>::iterator iter = base_regs.begin ();
2489 iter != base_regs.end (); ++iter)
2490 m_store.purge_cluster (*iter);
757bf1df 2491
808f4dfe
DM
2492 /* Find any pointers to REG or its descendents; convert to poisoned. */
2493 poison_any_pointers_to_descendents (reg, pkind);
757bf1df
DM
2494}
2495
/* Implementation of BindingVisitor.
   Update the bound svalues for regions below REG to use poisoned
   values instead.  */

struct bad_pointer_finder
{
  /* REG: the region being unbound; PKIND: the poison kind to use for
     pointers into it; MGR: manager used to create the poisoned svalues.  */
  bad_pointer_finder (const region *reg, enum poison_kind pkind,
		      region_model_manager *mgr)
  : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
  {}

  /* Callback for each binding: if SVAL is a pointer into REG's
     descendents, replace it (in place, via the reference) with a
     poisoned svalue of the same type.  */
  void on_binding (const binding_key *, const svalue *&sval)
  {
    if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
      {
	const region *ptr_dst = ptr_sval->get_pointee ();
	/* Poison ptrs to descendents of REG, but not to REG itself,
	   otherwise double-free detection doesn't work (since sm-state
	   for "free" is stored on the original ptr svalue).  */
	if (ptr_dst->descendent_of_p (m_reg)
	    && ptr_dst != m_reg)
	  {
	    sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
							 sval->get_type ());
	    ++m_count;
	  }
      }
  }

  const region *m_reg;
  enum poison_kind m_pkind;
  region_model_manager *const m_mgr;
  int m_count;	/* Number of pointers poisoned so far.  */
};
757bf1df 2530
808f4dfe
DM
2531/* Find any pointers to REG or its descendents; convert them to
2532 poisoned values of kind PKIND.
2533 Return the number of pointers that were poisoned. */
757bf1df 2534
808f4dfe
DM
2535int
2536region_model::poison_any_pointers_to_descendents (const region *reg,
2537 enum poison_kind pkind)
2538{
2539 bad_pointer_finder bv (reg, pkind, m_mgr);
2540 m_store.for_each_binding (bv);
2541 return bv.m_count;
757bf1df
DM
2542}
2543
/* Attempt to merge THIS with OTHER_MODEL, writing the result
   to OUT_MODEL.  Use POINT to distinguish values created as a
   result of merging.  */

bool
region_model::can_merge_with_p (const region_model &other_model,
				const program_point &point,
				region_model *out_model) const
{
  gcc_assert (out_model);
  /* All three models must share the same manager.  */
  gcc_assert (m_mgr == other_model.m_mgr);
  gcc_assert (m_mgr == out_model->m_mgr);

  /* Models in different frames can't be merged.  */
  if (m_current_frame != other_model.m_current_frame)
    return false;
  out_model->m_current_frame = m_current_frame;

  model_merger m (this, &other_model, point, out_model);

  /* Attempt to merge the stores; fail if they can't be merged.  */
  if (!store::can_merge_p (&m_store, &other_model.m_store,
			   &out_model->m_store, m_mgr->get_store_manager (),
			   &m))
    return false;

  /* Merge constraints.  */
  constraint_manager::merge (*m_constraints,
			      *other_model.m_constraints,
			      out_model->m_constraints,
			      m);

  return true;
}
2576
/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
   otherwise.  */

tree
region_model::get_fndecl_for_call (const gcall *call,
				   region_model_context *ctxt)
{
  tree fn_ptr = gimple_call_fn (call);
  if (fn_ptr == NULL_TREE)
    return NULL_TREE;
  /* Evaluate the function pointer; if it points at a known function
     region, we can extract the decl.  */
  const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
  if (const region_svalue *fn_ptr_ptr
	= fn_ptr_sval->dyn_cast_region_svalue ())
    {
      const region *reg = fn_ptr_ptr->get_pointee ();
      if (const function_region *fn_reg = reg->dyn_cast_function_region ())
	{
	  tree fn_decl = fn_reg->get_fndecl ();
	  /* Resolve aliases via the callgraph, so callers see the
	     ultimate target rather than an alias.  */
	  cgraph_node *node = cgraph_node::get (fn_decl);
	  if (!node)
	    return NULL_TREE;
	  const cgraph_node *ultimate_node = node->ultimate_alias_target ();
	  if (ultimate_node)
	    return ultimate_node->decl;
	}
    }

  return NULL_TREE;
}
2606
/* Would be much simpler to use a lambda here, if it were supported.  */

/* Closure data for append_ssa_names_cb: the model being inspected and
   the output vector to append matching decl_regions to.  */
struct append_ssa_names_cb_data
{
  const region_model *model;
  auto_vec<const decl_region *> *out;
};
757bf1df 2614
808f4dfe
DM
2615/* Populate *OUT with all decl_regions for SSA names in the current
2616 frame that have clusters within the store. */
757bf1df
DM
2617
2618void
808f4dfe
DM
2619region_model::
2620get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
2621 const
757bf1df 2622{
808f4dfe
DM
2623 append_ssa_names_cb_data data;
2624 data.model = this;
2625 data.out = out;
2626 m_store.for_each_cluster (append_ssa_names_cb, &data);
757bf1df
DM
2627}
2628
808f4dfe 2629/* Implementation detail of get_ssa_name_regions_for_current_frame. */
757bf1df 2630
808f4dfe
DM
2631void
2632region_model::append_ssa_names_cb (const region *base_reg,
2633 append_ssa_names_cb_data *cb_data)
757bf1df 2634{
808f4dfe
DM
2635 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
2636 return;
2637 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
757bf1df 2638 {
808f4dfe
DM
2639 if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
2640 cb_data->out->safe_push (decl_reg);
757bf1df
DM
2641 }
2642}
2643
808f4dfe 2644/* Return a new region describing a heap-allocated block of memory. */
757bf1df 2645
808f4dfe
DM
2646const region *
2647region_model::create_region_for_heap_alloc (const svalue *size_in_bytes)
757bf1df 2648{
808f4dfe
DM
2649 const region *reg = m_mgr->create_region_for_heap_alloc ();
2650 record_dynamic_extents (reg, size_in_bytes);
2651 return reg;
757bf1df
DM
2652}
2653
808f4dfe
DM
2654/* Return a new region describing a block of memory allocated within the
2655 current frame. */
757bf1df 2656
808f4dfe
DM
2657const region *
2658region_model::create_region_for_alloca (const svalue *size_in_bytes)
757bf1df 2659{
808f4dfe
DM
2660 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
2661 record_dynamic_extents (reg, size_in_bytes);
2662 return reg;
757bf1df
DM
2663}
2664
/* Placeholder hook for recording that the size of REG is SIZE_IN_BYTES.
   Currently does nothing.  */

void
region_model::
record_dynamic_extents (const region *reg ATTRIBUTE_UNUSED,
			const svalue *size_in_bytes ATTRIBUTE_UNUSED)
{
  /* Intentionally empty; called by create_region_for_heap_alloc and
     create_region_for_alloca so a future implementation has a single
     place to hook into.  */
}
2674
/* struct model_merger.  */

/* Dump a multiline representation of this merger to PP.
   Prints model A, model B, then the merged model, each with a
   heading line.  */

void
model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
{
  pp_string (pp, "model A:");
  pp_newline (pp);
  m_model_a->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "model B:");
  pp_newline (pp);
  m_model_b->dump_to_pp (pp, simple, true);
  pp_newline (pp);

  pp_string (pp, "merged model:");
  pp_newline (pp);
  m_merged_model->dump_to_pp (pp, simple, true);
  pp_newline (pp);
}
2697
/* Dump a multiline representation of this merger to FILE.  */

void
model_merger::dump (FILE *fp, bool simple) const
{
  /* Set up a pretty_printer that streams to FP, matching the
     diagnostic machinery's tree printing and color settings.  */
  pretty_printer pp;
  pp_format_decoder (&pp) = default_tree_printer;
  pp_show_color (&pp) = pp_show_color (global_dc->printer);
  pp.buffer->stream = fp;
  dump_to_pp (&pp, simple);
  pp_flush (&pp);
}
2710
/* Dump a multiline representation of this merger to stderr.
   For use from the debugger.  */

DEBUG_FUNCTION void
model_merger::dump (bool simple) const
{
  dump (stderr, simple);
}
2718
75038aa6
DM
2719} // namespace ana
2720
/* Dump RMODEL fully to stderr (i.e. without summarization).
   For use from the debugger.  */

DEBUG_FUNCTION void
debug (const region_model &rmodel)
{
  rmodel.dump (false);
}
2728
/* class engine.  */

/* Dump the managed objects by class to LOGGER, and the per-class totals.  */

void
engine::log_stats (logger *logger) const
{
  /* "true": request the per-class totals as well.  */
  m_mgr.log_stats (logger, true);
}
2738
75038aa6
DM
2739namespace ana {
2740
757bf1df
DM
2741#if CHECKING_P
2742
2743namespace selftest {
2744
8c08c983
DM
2745/* Build a constant tree of the given type from STR. */
2746
2747static tree
2748build_real_cst_from_string (tree type, const char *str)
2749{
2750 REAL_VALUE_TYPE real;
2751 real_from_string (&real, str);
2752 return build_real (type, real);
2753}
2754
2755/* Append various "interesting" constants to OUT (e.g. NaN). */
2756
2757static void
2758append_interesting_constants (auto_vec<tree> *out)
2759{
2760 out->safe_push (build_int_cst (integer_type_node, 0));
2761 out->safe_push (build_int_cst (integer_type_node, 42));
2762 out->safe_push (build_int_cst (unsigned_type_node, 0));
2763 out->safe_push (build_int_cst (unsigned_type_node, 42));
2764 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
2765 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
2766 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
2767 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
2768 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
2769 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
2770 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
2771 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
2772}
2773
2774/* Verify that tree_cmp is a well-behaved comparator for qsort, even
2775 if the underlying constants aren't comparable. */
2776
2777static void
2778test_tree_cmp_on_constants ()
2779{
2780 auto_vec<tree> csts;
2781 append_interesting_constants (&csts);
2782
2783 /* Try sorting every triple. */
2784 const unsigned num = csts.length ();
2785 for (unsigned i = 0; i < num; i++)
2786 for (unsigned j = 0; j < num; j++)
2787 for (unsigned k = 0; k < num; k++)
2788 {
2789 auto_vec<tree> v (3);
2790 v.quick_push (csts[i]);
2791 v.quick_push (csts[j]);
2792 v.quick_push (csts[k]);
2793 v.qsort (tree_cmp);
2794 }
2795}
2796
757bf1df
DM
2797/* Implementation detail of the ASSERT_CONDITION_* macros. */
2798
808f4dfe
DM
2799void
2800assert_condition (const location &loc,
2801 region_model &model,
2802 const svalue *lhs, tree_code op, const svalue *rhs,
2803 tristate expected)
2804{
2805 tristate actual = model.eval_condition (lhs, op, rhs);
2806 ASSERT_EQ_AT (loc, actual, expected);
2807}
2808
2809/* Implementation detail of the ASSERT_CONDITION_* macros. */
2810
757bf1df
DM
2811void
2812assert_condition (const location &loc,
2813 region_model &model,
2814 tree lhs, tree_code op, tree rhs,
2815 tristate expected)
2816{
2817 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
2818 ASSERT_EQ_AT (loc, actual, expected);
2819}
2820
90f7c300
DM
2821/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
2822
2823static void
2824assert_dump_tree_eq (const location &loc, tree t, const char *expected)
2825{
2826 auto_fix_quotes sentinel;
2827 pretty_printer pp;
2828 pp_format_decoder (&pp) = default_tree_printer;
2829 dump_tree (&pp, t);
2830 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
2831}
2832
/* Assert that dump_tree (T) is EXPECTED.
   Wrapper macro capturing the selftest source location.  */

#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
  SELFTEST_END_STMT
2839
757bf1df
DM
2840/* Implementation detail of ASSERT_DUMP_EQ. */
2841
2842static void
2843assert_dump_eq (const location &loc,
2844 const region_model &model,
2845 bool summarize,
2846 const char *expected)
2847{
2848 auto_fix_quotes sentinel;
2849 pretty_printer pp;
2850 pp_format_decoder (&pp) = default_tree_printer;
808f4dfe
DM
2851
2852 model.dump_to_pp (&pp, summarize, true);
757bf1df
DM
2853 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
2854}
2855
/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED.
   Wrapper macro capturing the selftest source location.  */

#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
  SELFTEST_BEGIN_STMT \
  assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
  SELFTEST_END_STMT
2862
/* Smoketest for region_model::dump_to_pp.
   Check the dump of an empty model, both in full and summarized form
   (identical for an empty model).  */

static void
test_dump ()
{
  region_model_manager mgr;
  region_model model (&mgr);

  ASSERT_DUMP_EQ (model, false,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
  ASSERT_DUMP_EQ (model, true,
		  "stack depth: 0\n"
		  "m_called_unknown_fn: FALSE\n"
		  "constraint_manager:\n"
		  "  equiv classes:\n"
		  "  constraints:\n");
}
2884
884d9141
DM
2885/* Helper function for selftests. Create a struct or union type named NAME,
2886 with the fields given by the FIELD_DECLS in FIELDS.
2887 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
2888 create a UNION_TYPE. */
2889
2890static tree
2891make_test_compound_type (const char *name, bool is_struct,
2892 const auto_vec<tree> *fields)
2893{
2894 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
2895 TYPE_NAME (t) = get_identifier (name);
2896 TYPE_SIZE (t) = 0;
2897
2898 tree fieldlist = NULL;
2899 int i;
2900 tree field;
2901 FOR_EACH_VEC_ELT (*fields, i, field)
2902 {
2903 gcc_assert (TREE_CODE (field) == FIELD_DECL);
2904 DECL_CONTEXT (field) = t;
2905 fieldlist = chainon (field, fieldlist);
2906 }
2907 fieldlist = nreverse (fieldlist);
2908 TYPE_FIELDS (t) = fieldlist;
2909
2910 layout_type (t);
2911 return t;
2912}
2913
/* Selftest fixture for creating the type "struct coord {int x; int y; };".  */

struct coord_test
{
  coord_test ()
  {
    auto_vec<tree> fields;
    m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("x"), integer_type_node);
    fields.safe_push (m_x_field);
    m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
                            get_identifier ("y"), integer_type_node);
    fields.safe_push (m_y_field);
    m_coord_type = make_test_compound_type ("coord", true, &fields);
  }

  /* FIELD_DECL for "x" (the first field).  */
  tree m_x_field;
  /* FIELD_DECL for "y" (the second field).  */
  tree m_y_field;
  /* The laid-out RECORD_TYPE for "struct coord".  */
  tree m_coord_type;
};
2934
/* Verify usage of a struct.  */

static void
test_struct ()
{
  coord_test ct;

  /* Global "struct coord c;" plus COMPONENT_REFs for "c.x" and "c.y".  */
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                     c, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "c.x = 17; c.y = -3;".  */
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Verify get_offset for "c.x": the first field, so at bit offset 0
     within the region for "c".  */
  {
    const region *c_x_reg = model.get_lvalue (c_x, NULL);
    region_offset offset = c_x_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "c.y": the second field, expected directly
     after the int "x" i.e. at bit offset INT_TYPE_SIZE.  */
  {
    const region *c_y_reg = model.get_lvalue (c_y, NULL);
    region_offset offset = c_y_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }
}
2972
808f4dfe 2973/* Verify usage of an array element. */
884d9141
DM
2974
2975static void
808f4dfe 2976test_array_1 ()
884d9141
DM
2977{
2978 tree tlen = size_int (10);
2979 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
2980
2981 tree a = build_global_decl ("a", arr_type);
2982
808f4dfe
DM
2983 region_model_manager mgr;
2984 region_model model (&mgr);
884d9141
DM
2985 tree int_0 = build_int_cst (integer_type_node, 0);
2986 tree a_0 = build4 (ARRAY_REF, char_type_node,
2987 a, int_0, NULL_TREE, NULL_TREE);
2988 tree char_A = build_int_cst (char_type_node, 'A');
2989 model.set_value (a_0, char_A, NULL);
884d9141
DM
2990}
2991
/* Verify that region_model::get_representative_tree works as expected.  */

static void
test_get_representative_tree ()
{
  region_model_manager mgr;

  /* STRING_CST: the representative of its svalue is the constant
     itself.  */
  {
    tree string_cst = build_string (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_EQ (rep, string_cst);
  }

  /* String literal (a pointer to the start of the string).  */
  {
    tree string_cst_ptr = build_string_literal (4, "foo");
    region_model m (&mgr);
    const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
    tree rep = m.get_representative_tree (str_sval);
    ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
  }

  /* Value of an element within an array.  A placeholder_svalue is
     stored into an element's region, and we verify that the
     representative tree for that value is the element access.  */
  {
    tree tlen = size_int (10);
    tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
    tree a = build_global_decl ("a", arr_type);
    placeholder_svalue test_sval (char_type_node, "test value");

    /* Value of a[3].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree int_3 = build_int_cst (integer_type_node, 3);
      tree a_3 = build4 (ARRAY_REF, char_type_node,
                         a, int_3, NULL_TREE, NULL_TREE);
      const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
      model.set_value (a_3_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[3]");
    }

    /* Value of a[0].  */
    {
      test_region_model_context ctxt;
      region_model model (&mgr);
      tree idx = build_int_cst (integer_type_node, 0);
      tree a_0 = build4 (ARRAY_REF, char_type_node,
                         a, idx, NULL_TREE, NULL_TREE);
      const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
      model.set_value (a_0_reg, &test_sval, &ctxt);
      tree rep = model.get_representative_tree (&test_sval);
      ASSERT_DUMP_TREE_EQ (rep, "a[0]");
    }
  }

  /* Value of a field within a struct.  */
  {
    coord_test ct;

    tree c = build_global_decl ("c", ct.m_coord_type);
    tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
                       c, ct.m_x_field, NULL_TREE);
    tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
                       c, ct.m_y_field, NULL_TREE);

    test_region_model_context ctxt;

    /* Value of initial field.  */
    {
      region_model m (&mgr);
      const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
      placeholder_svalue test_sval_x (integer_type_node, "test x val");
      m.set_value (c_x_reg, &test_sval_x, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_x);
      ASSERT_DUMP_TREE_EQ (rep, "c.x");
    }

    /* Value of non-initial field.  */
    {
      region_model m (&mgr);
      const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
      placeholder_svalue test_sval_y (integer_type_node, "test y val");
      m.set_value (c_y_reg, &test_sval_y, &ctxt);
      tree rep = m.get_representative_tree (&test_sval_y);
      ASSERT_DUMP_TREE_EQ (rep, "c.y");
    }
  }
}
3084
/* Verify that calling region_model::get_rvalue repeatedly on the same
   tree constant retrieves the same svalue *.  */

static void
test_unique_constants ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_42 = build_int_cst (integer_type_node, 42);

  test_region_model_context ctxt;
  region_model_manager mgr;
  region_model model (&mgr);
  /* Repeated lookups of the same constant yield the identical svalue;
     different constants yield different svalues.  */
  ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
  ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
             model.get_rvalue (int_42, &ctxt));
  ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
  /* None of the above should have emitted any diagnostics.  */
  ASSERT_EQ (ctxt.get_num_diagnostics (), 0);

  /* A "(const int)42" will be a different tree from "(int)42)"...  */
  tree const_int_type_node
    = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
  tree const_int_42 = build_int_cst (const_int_type_node, 42);
  ASSERT_NE (int_42, const_int_42);
  /* It should have a different const_svalue.  */
  const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
  const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
  ASSERT_NE (int_42_sval, const_int_42_sval);
  /* But they should compare as equal.  */
  ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
  ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
}
3116
808f4dfe
DM
3117/* Verify that each type gets its own singleton unknown_svalue within a
3118 region_model_manager, and that NULL_TREE gets its own singleton. */
757bf1df
DM
3119
3120static void
808f4dfe 3121test_unique_unknowns ()
757bf1df 3122{
808f4dfe
DM
3123 region_model_manager mgr;
3124 const svalue *unknown_int
3125 = mgr.get_or_create_unknown_svalue (integer_type_node);
3126 /* Repeated calls with the same type should get the same "unknown"
3127 svalue. */
3128 const svalue *unknown_int_2
3129 = mgr.get_or_create_unknown_svalue (integer_type_node);
3130 ASSERT_EQ (unknown_int, unknown_int_2);
757bf1df 3131
808f4dfe
DM
3132 /* Different types (or the NULL type) should have different
3133 unknown_svalues. */
3134 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
3135 ASSERT_NE (unknown_NULL_type, unknown_int);
757bf1df 3136
808f4dfe
DM
3137 /* Repeated calls with NULL for the type should get the same "unknown"
3138 svalue. */
3139 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
3140 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
757bf1df
DM
3141}
3142
808f4dfe 3143/* Verify that initial_svalue are handled as expected. */
757bf1df 3144
808f4dfe
DM
3145static void
3146test_initial_svalue_folding ()
757bf1df 3147{
808f4dfe
DM
3148 region_model_manager mgr;
3149 tree x = build_global_decl ("x", integer_type_node);
3150 tree y = build_global_decl ("y", integer_type_node);
757bf1df 3151
808f4dfe
DM
3152 test_region_model_context ctxt;
3153 region_model model (&mgr);
3154 const svalue *x_init = model.get_rvalue (x, &ctxt);
3155 const svalue *y_init = model.get_rvalue (y, &ctxt);
3156 ASSERT_NE (x_init, y_init);
3157 const region *x_reg = model.get_lvalue (x, &ctxt);
3158 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
757bf1df 3159
808f4dfe 3160}
757bf1df 3161
/* Verify that unary ops are folded as expected.  */

static void
test_unaryop_svalue_folding ()
{
  region_model_manager mgr;
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);
  const svalue *y_init = model.get_rvalue (y, &ctxt);
  const region *x_reg = model.get_lvalue (x, &ctxt);
  ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));

  /* "(int)x" -> "x": a cast to the value's own type folds away.  */
  ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));

  /* "(void *)x" -> something other than "x".  */
  ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));

  /* Logical negation of a comparison folds to the inverted
     comparison.  */
  /* "!(x == y)" -> "x != y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
                                      x_init, y_init));
  /* "!(x > y)" -> "x <= y".  */
  ASSERT_EQ (mgr.get_or_create_unaryop
               (boolean_type_node, TRUTH_NOT_EXPR,
                mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
                                         x_init, y_init)),
             mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
                                      x_init, y_init));
}
3199
/* Verify that binops on constant svalues are folded.  */

static void
test_binop_svalue_folding ()
{
#define NUM_CSTS 10
  tree cst_int[NUM_CSTS];
  region_model_manager mgr;
  const svalue *cst_sval[NUM_CSTS];
  /* Build constant svalues for the integers 0..NUM_CSTS-1.  */
  for (int i = 0; i < NUM_CSTS; i++)
    {
      cst_int[i] = build_int_cst (integer_type_node, i);
      cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
      ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
      ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
    }

  /* Exhaustively check pairwise folding of +, -, * and the
     equality/inequality comparisons on the constants (where the
     result is still within the table).  */
  for (int i = 0; i < NUM_CSTS; i++)
    for (int j = 0; j < NUM_CSTS; j++)
      {
	if (i != j)
	  ASSERT_NE (cst_sval[i], cst_sval[j]);
	if (i + j < NUM_CSTS)
	  {
	    const svalue *sum
	      = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (sum, cst_sval[i + j]);
	  }
	if (i - j >= 0)
	  {
	    const svalue *difference
	      = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (difference, cst_sval[i - j]);
	  }
	if (i * j < NUM_CSTS)
	  {
	    const svalue *product
	      = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
					 cst_sval[i], cst_sval[j]);
	    ASSERT_EQ (product, cst_sval[i * j]);
	  }
	/* Comparisons fold to the constants 1 (true) or 0 (false).  */
	const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
						    cst_sval[i], cst_sval[j]);
	ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
	const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
						     cst_sval[i], cst_sval[j]);
	ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
	// etc
      }

  tree x = build_global_decl ("x", integer_type_node);

  test_region_model_context ctxt;
  region_model model (&mgr);
  const svalue *x_init = model.get_rvalue (x, &ctxt);

  /* PLUS_EXPR folding: adding zero on either side is the identity.  */
  const svalue *x_init_plus_zero
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_plus_zero, x_init);
  const svalue *zero_plus_x_init
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_plus_x_init, x_init);

  /* MULT_EXPR folding: multiplication by zero yields zero...  */
  const svalue *x_init_times_zero
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[0]);
  ASSERT_EQ (x_init_times_zero, cst_sval[0]);
  const svalue *zero_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[0], x_init);
  ASSERT_EQ (zero_times_x_init, cst_sval[0]);

  /* ...and multiplication by one is the identity.  */
  const svalue *x_init_times_one
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[1]);
  ASSERT_EQ (x_init_times_one, x_init);
  const svalue *one_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[1], x_init);
  ASSERT_EQ (one_times_x_init, x_init);

  // etc
  // TODO: do we want to use the match-and-simplify DSL for this?

  /* Verify that binops put any constants on the RHS
     (canonicalization of commutative ops).  */
  const svalue *four_times_x_init
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       cst_sval[4], x_init);
  const svalue *x_init_times_four
    = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
			       x_init, cst_sval[4]);
  ASSERT_EQ (four_times_x_init, x_init_times_four);
  const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
  ASSERT_EQ (binop->get_op (), MULT_EXPR);
  ASSERT_EQ (binop->get_arg0 (), x_init);
  ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);

  /* Verify that ((x + 1) + 1) == (x + 2).  */
  const svalue *x_init_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[1]);
  const svalue *x_init_plus_two
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init, cst_sval[2]);
  const svalue *x_init_plus_one_plus_one
    = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
			       x_init_plus_one, cst_sval[1]);
  ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
}
3315
3316/* Verify that sub_svalues are folded as expected. */
757bf1df 3317
808f4dfe
DM
3318static void
3319test_sub_svalue_folding ()
3320{
3321 coord_test ct;
3322 tree c = build_global_decl ("c", ct.m_coord_type);
3323 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
3324 c, ct.m_x_field, NULL_TREE);
757bf1df 3325
808f4dfe
DM
3326 region_model_manager mgr;
3327 region_model model (&mgr);
3328 test_region_model_context ctxt;
3329 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 3330
808f4dfe
DM
3331 /* Verify that sub_svalue of "unknown" simply
3332 yields an unknown. */
757bf1df 3333
808f4dfe
DM
3334 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
3335 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
3336 unknown, c_x_reg);
3337 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
3338 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
757bf1df
DM
3339}
3340
/* Test that region::descendent_of_p works as expected.  */

static void
test_descendent_of_p ()
{
  region_model_manager mgr;
  const region *stack = mgr.get_stack_region ();
  const region *heap = mgr.get_heap_region ();
  const region *code = mgr.get_code_region ();
  const region *globals = mgr.get_globals_region ();

  /* descendent_of_p should return true when used on the region itself.  */
  ASSERT_TRUE (stack->descendent_of_p (stack));
  /* The top-level memory spaces are disjoint from one another.  */
  ASSERT_FALSE (stack->descendent_of_p (heap));
  ASSERT_FALSE (stack->descendent_of_p (code));
  ASSERT_FALSE (stack->descendent_of_p (globals));

  /* A global variable's region is a descendent of the globals
     region.  */
  tree x = build_global_decl ("x", integer_type_node);
  const region *x_reg = mgr.get_region_for_global (x);
  ASSERT_TRUE (x_reg->descendent_of_p (globals));

  /* A cast_region should be a descendent of the original region.  */
  const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
  ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
}
3366
/* Verify that simple assignments work as expected.  */

static void
test_assignment ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);

  /* "x == 0", then use of y, then "y = 0;".  */
  region_model_manager mgr;
  region_model model (&mgr);
  ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
  /* Before the assignment, nothing is known about y.  */
  ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
  model.set_value (model.get_lvalue (y, NULL),
		   model.get_rvalue (int_0, NULL),
		   NULL);
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
  /* Since "x == 0" and y was assigned 0, "y == x" must now hold.  */
  ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
}
3387
/* Verify that compound assignments work as expected.  */

static void
test_compound_assignment ()
{
  coord_test ct;

  /* Two "struct coord" globals, "c" and "d", plus lvalues for their
     fields.  */
  tree c = build_global_decl ("c", ct.m_coord_type);
  tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     c, ct.m_x_field, NULL_TREE);
  tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     c, ct.m_y_field, NULL_TREE);
  tree d = build_global_decl ("d", ct.m_coord_type);
  tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
		     d, ct.m_x_field, NULL_TREE);
  tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
		     d, ct.m_y_field, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* Populate c's fields: "c.x = 17; c.y = -3;".  */
  model.set_value (c_x, int_17, NULL);
  model.set_value (c_y, int_m3, NULL);

  /* Copy c to d.  */
  model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL),
		     NULL);
  /* Check that the fields have the same svalues.  */
  ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
  ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
}
3421
/* Verify the details of pushing and popping stack frames.  */

static void
test_stack_frames ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_10 = build_int_cst (integer_type_node, 10);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree int_0 = build_int_cst (integer_type_node, 0);

  auto_vec <tree> param_types;
  tree parent_fndecl = make_fndecl (integer_type_node,
				    "parent_fn",
				    param_types);
  allocate_struct_function (parent_fndecl, true);

  tree child_fndecl = make_fndecl (integer_type_node,
				   "child_fn",
				   param_types);
  allocate_struct_function (child_fndecl, true);

  /* "a" and "b" in the parent frame.  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("b"),
		       integer_type_node);
  /* "x" and "y" in a child frame.  */
  tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("x"),
		       integer_type_node);
  tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("y"),
		       integer_type_node);

  /* "p" global.  */
  tree p = build_global_decl ("p", ptr_type_node);

  /* "q" global.  */
  tree q = build_global_decl ("q", ptr_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push stack frame for "parent_fn", and give "a" a value and "b"
     a constraint within it.  */
  const region *parent_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
			NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
  const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
  model.set_value (a_in_parent_reg,
		   model.get_rvalue (int_42, &ctxt),
		   &ctxt);
  ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);

  model.add_constraint (b, LT_EXPR, int_10, &ctxt);
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Push stack frame for "child_fn", and set up "x" and "y"
     analogously.  */
  const region *child_frame_reg
    = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
  ASSERT_EQ (model.get_current_frame (), child_frame_reg);
  ASSERT_TRUE (model.region_exists_p (child_frame_reg));
  const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
  model.set_value (x_in_child_reg,
		   model.get_rvalue (int_0, &ctxt),
		   &ctxt);
  ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);

  model.add_constraint (y, NE_EXPR, int_5, &ctxt);
  ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Point a global pointer at a local in the child frame:  p = &x.  */
  const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
  model.set_value (p_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
		   &ctxt);
  /* A global's region is not within any frame.  */
  ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);

  /* Point another global pointer at p: q = &p.  */
  const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
  model.set_value (q_in_globals_reg,
		   mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
		   &ctxt);

  /* Test region::descendent_of_p.  */
  ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
  ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
  ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));

  /* Pop the "child_fn" frame from the stack.  */
  model.pop_frame (NULL, NULL, &ctxt);
  ASSERT_FALSE (model.region_exists_p (child_frame_reg));
  ASSERT_TRUE (model.region_exists_p (parent_frame_reg));

  /* Verify that p (which was pointing at the local "x" in the popped
     frame) has been poisoned.  */
  const svalue *new_p_sval = model.get_rvalue (p, &ctxt);
  ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
  ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
	     POISON_KIND_POPPED_STACK);

  /* Verify that q still points to p, in spite of the region
     renumbering.  */
  const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
  ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
  ASSERT_EQ (new_q_sval->dyn_cast_region_svalue ()->get_pointee (),
	     model.get_lvalue (p, &ctxt));

  /* Verify that top of stack has been updated.  */
  ASSERT_EQ (model.get_current_frame (), parent_frame_reg);

  /* Verify locals in parent frame.  */
  /* Verify "a" still has its value.  */
  const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
  ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
  ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
	     int_42);
  /* Verify "b" still has its constraint.  */
  ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
	     tristate (tristate::TS_TRUE));
}
3549
/* Verify that get_representative_path_var works as expected, that
   we can map from regions to parms and back within a recursive call
   stack.  */

static void
test_get_representative_path_var ()
{
  auto_vec <tree> param_types;
  tree fndecl = make_fndecl (integer_type_node,
			     "factorial",
			     param_types);
  allocate_struct_function (fndecl, true);

  /* Parm "n".  */
  tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("n"),
		       integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;
  region_model model (&mgr);

  /* Push 5 stack frames for "factorial", each with a param,
     recording the region and initial svalue of "n" at each depth.  */
  auto_vec<const region *> parm_regs;
  auto_vec<const svalue *> parm_svals;
  for (int depth = 0; depth < 5; depth++)
    {
      const region *frame_n_reg
	= model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
      const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
      parm_regs.safe_push (parm_n_reg);

      ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
      const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
      parm_svals.safe_push (sval_n);
    }

  /* Verify that we can recognize that the regions are the parms,
     at every depth.  */
  for (int depth = 0; depth < 5; depth++)
    {
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
      /* ...and that we can lookup lvalues for locals for all frames,
	 not just the top.  */
      ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
		 parm_regs[depth]);
      /* ...and that we can locate the svalues.  */
      {
	svalue_set visited;
	ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
						      &visited),
		   path_var (n, depth + 1));
      }
    }
}
3610
/* Ensure that region_model::operator== works as expected.  */

static void
test_equality_1 ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_17 = build_int_cst (integer_type_node, 17);

  /* Verify that "empty" region_model instances are equal to each other.  */
  region_model_manager mgr;
  region_model model0 (&mgr);
  region_model model1 (&mgr);
  ASSERT_EQ (model0, model1);

  /* Verify that setting state in model0 makes the models non-equal.  */
  tree x = build_global_decl ("x", integer_type_node);
  model0.set_value (x, int_42, NULL);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model0, model1);

  /* Verify the copy-ctor.  */
  region_model model2 (model0);
  ASSERT_EQ (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
  ASSERT_NE (model1, model2);

  /* Verify that models obtained from copy-ctor are independently editable
     w/o affecting the original model.  */
  model2.set_value (x, int_17, NULL);
  ASSERT_NE (model0, model2);
  ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
  ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
}
3644
3645/* Verify that region models for
3646 x = 42; y = 113;
3647 and
3648 y = 113; x = 42;
808f4dfe 3649 are equal. */
757bf1df
DM
3650
3651static void
3652test_canonicalization_2 ()
3653{
3654 tree int_42 = build_int_cst (integer_type_node, 42);
3655 tree int_113 = build_int_cst (integer_type_node, 113);
3656 tree x = build_global_decl ("x", integer_type_node);
3657 tree y = build_global_decl ("y", integer_type_node);
3658
808f4dfe
DM
3659 region_model_manager mgr;
3660 region_model model0 (&mgr);
757bf1df
DM
3661 model0.set_value (model0.get_lvalue (x, NULL),
3662 model0.get_rvalue (int_42, NULL),
3663 NULL);
3664 model0.set_value (model0.get_lvalue (y, NULL),
3665 model0.get_rvalue (int_113, NULL),
3666 NULL);
3667
808f4dfe 3668 region_model model1 (&mgr);
757bf1df
DM
3669 model1.set_value (model1.get_lvalue (y, NULL),
3670 model1.get_rvalue (int_113, NULL),
3671 NULL);
3672 model1.set_value (model1.get_lvalue (x, NULL),
3673 model1.get_rvalue (int_42, NULL),
3674 NULL);
3675
757bf1df
DM
3676 ASSERT_EQ (model0, model1);
3677}
3678
3679/* Verify that constraints for
3680 x > 3 && y > 42
3681 and
3682 y > 42 && x > 3
3683 are equal after canonicalization. */
3684
3685static void
3686test_canonicalization_3 ()
3687{
3688 tree int_3 = build_int_cst (integer_type_node, 3);
3689 tree int_42 = build_int_cst (integer_type_node, 42);
3690 tree x = build_global_decl ("x", integer_type_node);
3691 tree y = build_global_decl ("y", integer_type_node);
3692
808f4dfe
DM
3693 region_model_manager mgr;
3694 region_model model0 (&mgr);
757bf1df
DM
3695 model0.add_constraint (x, GT_EXPR, int_3, NULL);
3696 model0.add_constraint (y, GT_EXPR, int_42, NULL);
3697
808f4dfe 3698 region_model model1 (&mgr);
757bf1df
DM
3699 model1.add_constraint (y, GT_EXPR, int_42, NULL);
3700 model1.add_constraint (x, GT_EXPR, int_3, NULL);
3701
808f4dfe
DM
3702 model0.canonicalize ();
3703 model1.canonicalize ();
757bf1df
DM
3704 ASSERT_EQ (model0, model1);
3705}
3706
/* Verify that we can canonicalize a model containing NaN and other real
   constants.  */

static void
test_canonicalization_4 ()
{
  auto_vec<tree> csts;
  append_interesting_constants (&csts);

  region_model_manager mgr;
  region_model model (&mgr);

  /* Populate the model with an rvalue for each "interesting"
     constant.  */
  unsigned i;
  tree cst;
  FOR_EACH_VEC_ELT (csts, i, cst)
    model.get_rvalue (cst, NULL);

  /* Merely check that canonicalization completes (no ICE/hang).  */
  model.canonicalize ();
}
3726
/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.  */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  program_point point (program_point::origin ());
  test_region_model_context ctxt;
  /* Build both input models against the caller's manager, so that
     svalues/regions are interned consistently with the output.  */
  region_model_manager *mgr = out_merged_model->get_manager ();
  region_model model0 (mgr);
  region_model model1 (mgr);
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}
3757
3758/* Verify that we can merge region_model instances. */
3759
3760static void
3761test_state_merging ()
3762{
3763 tree int_42 = build_int_cst (integer_type_node, 42);
3764 tree int_113 = build_int_cst (integer_type_node, 113);
3765 tree x = build_global_decl ("x", integer_type_node);
3766 tree y = build_global_decl ("y", integer_type_node);
3767 tree z = build_global_decl ("z", integer_type_node);
3768 tree p = build_global_decl ("p", ptr_type_node);
3769
3770 tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
3771 tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);
3772
3773 auto_vec <tree> param_types;
3774 tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
3775 allocate_struct_function (test_fndecl, true);
3776
3777 /* Param "a". */
3778 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3779 get_identifier ("a"),
3780 integer_type_node);
3781 tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);
3782
455f58ec
DM
3783 /* Param "q", a pointer. */
3784 tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3785 get_identifier ("q"),
3786 ptr_type_node);
3787
808f4dfe
DM
3788 program_point point (program_point::origin ());
3789 region_model_manager mgr;
3790
757bf1df 3791 {
808f4dfe
DM
3792 region_model model0 (&mgr);
3793 region_model model1 (&mgr);
3794 region_model merged (&mgr);
757bf1df 3795 /* Verify empty models can be merged. */
808f4dfe 3796 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3797 ASSERT_EQ (model0, merged);
3798 }
3799
3800 /* Verify that we can merge two contradictory constraints on the
3801 value for a global. */
3802 /* TODO: verify that the merged model doesn't have a value for
3803 the global */
3804 {
808f4dfe
DM
3805 region_model model0 (&mgr);
3806 region_model model1 (&mgr);
3807 region_model merged (&mgr);
757bf1df
DM
3808 test_region_model_context ctxt;
3809 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
3810 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe 3811 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3812 ASSERT_NE (model0, merged);
3813 ASSERT_NE (model1, merged);
3814 }
3815
3816 /* Verify handling of a PARM_DECL. */
3817 {
3818 test_region_model_context ctxt;
808f4dfe
DM
3819 region_model model0 (&mgr);
3820 region_model model1 (&mgr);
757bf1df
DM
3821 ASSERT_EQ (model0.get_stack_depth (), 0);
3822 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
3823 ASSERT_EQ (model0.get_stack_depth (), 1);
757bf1df
DM
3824 model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
3825
808f4dfe
DM
3826 placeholder_svalue test_sval (integer_type_node, "test sval");
3827 model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
3828 model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
757bf1df
DM
3829 ASSERT_EQ (model0, model1);
3830
757bf1df 3831 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3832 region_model merged (&mgr);
3833 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3834 ASSERT_EQ (model0, merged);
808f4dfe
DM
3835 /* In particular, "a" should have the placeholder value. */
3836 ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
757bf1df
DM
3837 }
3838
3839 /* Verify handling of a global. */
3840 {
3841 test_region_model_context ctxt;
808f4dfe
DM
3842 region_model model0 (&mgr);
3843 region_model model1 (&mgr);
757bf1df 3844
808f4dfe
DM
3845 placeholder_svalue test_sval (integer_type_node, "test sval");
3846 model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
3847 model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
3848 ASSERT_EQ (model0, model1);
757bf1df
DM
3849
3850 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3851 region_model merged (&mgr);
3852 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3853 ASSERT_EQ (model0, merged);
808f4dfe
DM
3854 /* In particular, "x" should have the placeholder value. */
3855 ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
757bf1df
DM
3856 }
3857
3858 /* Use global-handling to verify various combinations of values. */
3859
3860 /* Two equal constant values. */
3861 {
808f4dfe
DM
3862 region_model merged (&mgr);
3863 const svalue *merged_x_sval;
757bf1df
DM
3864 assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);
3865
3866 /* In particular, there should be a constant value for "x". */
3867 ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
3868 ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
3869 int_42);
3870 }
3871
3872 /* Two non-equal constant values. */
3873 {
808f4dfe
DM
3874 region_model merged (&mgr);
3875 const svalue *merged_x_sval;
757bf1df
DM
3876 assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);
3877
808f4dfe
DM
3878 /* In particular, there should be a "widening" value for "x". */
3879 ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
757bf1df
DM
3880 }
3881
808f4dfe 3882 /* Initial and constant. */
757bf1df 3883 {
808f4dfe
DM
3884 region_model merged (&mgr);
3885 const svalue *merged_x_sval;
757bf1df
DM
3886 assert_region_models_merge (x, NULL_TREE, int_113, &merged, &merged_x_sval);
3887
3888 /* In particular, there should be an unknown value for "x". */
3889 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
3890 }
3891
808f4dfe 3892 /* Constant and initial. */
757bf1df 3893 {
808f4dfe
DM
3894 region_model merged (&mgr);
3895 const svalue *merged_x_sval;
757bf1df
DM
3896 assert_region_models_merge (x, int_42, NULL_TREE, &merged, &merged_x_sval);
3897
3898 /* In particular, there should be an unknown value for "x". */
3899 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
3900 }
3901
3902 /* Unknown and constant. */
3903 // TODO
3904
3905 /* Pointers: NULL and NULL. */
3906 // TODO
3907
3908 /* Pointers: NULL and non-NULL. */
3909 // TODO
3910
3911 /* Pointers: non-NULL and non-NULL: ptr to a local. */
3912 {
808f4dfe 3913 region_model model0 (&mgr);
757bf1df 3914 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
757bf1df
DM
3915 model0.set_value (model0.get_lvalue (p, NULL),
3916 model0.get_rvalue (addr_of_a, NULL), NULL);
3917
3918 region_model model1 (model0);
3919 ASSERT_EQ (model0, model1);
3920
3921 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3922 region_model merged (&mgr);
3923 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3924 ASSERT_EQ (model0, merged);
3925 }
3926
3927 /* Pointers: non-NULL and non-NULL: ptr to a global. */
3928 {
808f4dfe 3929 region_model merged (&mgr);
757bf1df 3930 /* p == &y in both input models. */
808f4dfe 3931 const svalue *merged_p_sval;
757bf1df
DM
3932 assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
3933 &merged_p_sval);
3934
3935 /* We should get p == &y in the merged model. */
3936 ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
808f4dfe
DM
3937 const region_svalue *merged_p_ptr
3938 = merged_p_sval->dyn_cast_region_svalue ();
3939 const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
3940 ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
757bf1df
DM
3941 }
3942
3943 /* Pointers: non-NULL ptrs to different globals: should be unknown. */
3944 {
808f4dfe
DM
3945 region_model merged (&mgr);
3946 /* x == &y vs x == &z in the input models; these are actually casts
3947 of the ptrs to "int". */
3948 const svalue *merged_x_sval;
3949 // TODO:
757bf1df
DM
3950 assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
3951 &merged_x_sval);
3952
3953 /* We should get x == unknown in the merged model. */
3954 ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
3955 }
3956
3957 /* Pointers: non-NULL and non-NULL: ptr to a heap region. */
3958 {
3959 test_region_model_context ctxt;
808f4dfe
DM
3960 region_model model0 (&mgr);
3961 tree size = build_int_cst (integer_type_node, 1024);
3962 const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
3963 const region *new_reg = model0.create_region_for_heap_alloc (size_sval);
3964 const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
757bf1df 3965 model0.set_value (model0.get_lvalue (p, &ctxt),
808f4dfe 3966 ptr_sval, &ctxt);
757bf1df
DM
3967
3968 region_model model1 (model0);
3969
3970 ASSERT_EQ (model0, model1);
3971
808f4dfe
DM
3972 region_model merged (&mgr);
3973 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 3974
808f4dfe 3975 /* The merged model ought to be identical. */
757bf1df
DM
3976 ASSERT_EQ (model0, merged);
3977 }
3978
808f4dfe
DM
3979 /* Two regions sharing the same placeholder svalue should continue sharing
3980 it after self-merger. */
757bf1df
DM
3981 {
3982 test_region_model_context ctxt;
808f4dfe
DM
3983 region_model model0 (&mgr);
3984 placeholder_svalue placeholder_sval (integer_type_node, "test");
3985 model0.set_value (model0.get_lvalue (x, &ctxt),
3986 &placeholder_sval, &ctxt);
3987 model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
757bf1df
DM
3988 region_model model1 (model0);
3989
3990 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
3991 region_model merged (&mgr);
3992 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
3993 ASSERT_EQ (model0, merged);
3994
3995 /* In particular, we should have x == y. */
3996 ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
3997 tristate (tristate::TS_TRUE));
3998 }
3999
757bf1df 4000 {
808f4dfe
DM
4001 region_model model0 (&mgr);
4002 region_model model1 (&mgr);
757bf1df
DM
4003 test_region_model_context ctxt;
4004 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
4005 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
808f4dfe
DM
4006 region_model merged (&mgr);
4007 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
4008 }
4009
4010 {
808f4dfe
DM
4011 region_model model0 (&mgr);
4012 region_model model1 (&mgr);
757bf1df
DM
4013 test_region_model_context ctxt;
4014 model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
4015 model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
4016 model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
808f4dfe
DM
4017 region_model merged (&mgr);
4018 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 4019 }
757bf1df
DM
4020
4021 // TODO: what can't we merge? need at least one such test
4022
4023 /* TODO: various things
4024 - heap regions
4025 - value merging:
4026 - every combination, but in particular
808f4dfe 4027 - pairs of regions
757bf1df
DM
4028 */
4029
4030 /* Views. */
4031 {
4032 test_region_model_context ctxt;
808f4dfe 4033 region_model model0 (&mgr);
757bf1df 4034
808f4dfe
DM
4035 const region *x_reg = model0.get_lvalue (x, &ctxt);
4036 const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
757bf1df
DM
4037 model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);
4038
4039 region_model model1 (model0);
4040 ASSERT_EQ (model1, model0);
4041
4042 /* They should be mergeable, and the result should be the same. */
808f4dfe
DM
4043 region_model merged (&mgr);
4044 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df 4045 }
455f58ec
DM
4046
4047 /* Verify that we can merge a model in which a local in an older stack
4048 frame points to a local in a more recent stack frame. */
4049 {
808f4dfe 4050 region_model model0 (&mgr);
455f58ec 4051 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
808f4dfe 4052 const region *q_in_first_frame = model0.get_lvalue (q, NULL);
455f58ec
DM
4053
4054 /* Push a second frame. */
808f4dfe 4055 const region *reg_2nd_frame
455f58ec
DM
4056 = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
4057
4058 /* Have a pointer in the older frame point to a local in the
4059 more recent frame. */
808f4dfe
DM
4060 const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
4061 model0.set_value (q_in_first_frame, sval_ptr, NULL);
455f58ec
DM
4062
4063 /* Verify that it's pointing at the newer frame. */
808f4dfe
DM
4064 const region *reg_pointee
4065 = sval_ptr->dyn_cast_region_svalue ()->get_pointee ();
4066 ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);
455f58ec 4067
808f4dfe 4068 model0.canonicalize ();
455f58ec
DM
4069
4070 region_model model1 (model0);
4071 ASSERT_EQ (model0, model1);
4072
4073 /* They should be mergeable, and the result should be the same
4074 (after canonicalization, at least). */
808f4dfe
DM
4075 region_model merged (&mgr);
4076 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
4077 merged.canonicalize ();
455f58ec
DM
4078 ASSERT_EQ (model0, merged);
4079 }
4080
4081 /* Verify that we can merge a model in which a local points to a global. */
4082 {
808f4dfe 4083 region_model model0 (&mgr);
455f58ec
DM
4084 model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
4085 model0.set_value (model0.get_lvalue (q, NULL),
4086 model0.get_rvalue (addr_of_y, NULL), NULL);
4087
455f58ec
DM
4088 region_model model1 (model0);
4089 ASSERT_EQ (model0, model1);
4090
4091 /* They should be mergeable, and the result should be the same
4092 (after canonicalization, at least). */
808f4dfe
DM
4093 region_model merged (&mgr);
4094 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
455f58ec
DM
4095 ASSERT_EQ (model0, merged);
4096 }
757bf1df
DM
4097}
4098
4099/* Verify that constraints are correctly merged when merging region_model
4100 instances. */
4101
4102static void
4103test_constraint_merging ()
4104{
4105 tree int_0 = build_int_cst (integer_type_node, 0);
4106 tree int_5 = build_int_cst (integer_type_node, 5);
4107 tree x = build_global_decl ("x", integer_type_node);
4108 tree y = build_global_decl ("y", integer_type_node);
4109 tree z = build_global_decl ("z", integer_type_node);
4110 tree n = build_global_decl ("n", integer_type_node);
4111
808f4dfe 4112 region_model_manager mgr;
757bf1df
DM
4113 test_region_model_context ctxt;
4114
4115 /* model0: 0 <= (x == y) < n. */
808f4dfe 4116 region_model model0 (&mgr);
757bf1df
DM
4117 model0.add_constraint (x, EQ_EXPR, y, &ctxt);
4118 model0.add_constraint (x, GE_EXPR, int_0, NULL);
4119 model0.add_constraint (x, LT_EXPR, n, NULL);
4120
4121 /* model1: z != 5 && (0 <= x < n). */
808f4dfe 4122 region_model model1 (&mgr);
757bf1df
DM
4123 model1.add_constraint (z, NE_EXPR, int_5, NULL);
4124 model1.add_constraint (x, GE_EXPR, int_0, NULL);
4125 model1.add_constraint (x, LT_EXPR, n, NULL);
4126
4127 /* They should be mergeable; the merged constraints should
4128 be: (0 <= x < n). */
808f4dfe
DM
4129 program_point point (program_point::origin ());
4130 region_model merged (&mgr);
4131 ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
757bf1df
DM
4132
4133 ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
4134 tristate (tristate::TS_TRUE));
4135 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
4136 tristate (tristate::TS_TRUE));
4137
4138 ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
4139 tristate (tristate::TS_UNKNOWN));
4140 ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
4141 tristate (tristate::TS_UNKNOWN));
4142}
4143
808f4dfe
DM
4144/* Verify that widening_svalue::eval_condition_without_cm works as
4145 expected. */
4146
4147static void
4148test_widening_constraints ()
4149{
4150 program_point point (program_point::origin ());
4151 tree int_0 = build_int_cst (integer_type_node, 0);
4152 tree int_m1 = build_int_cst (integer_type_node, -1);
4153 tree int_1 = build_int_cst (integer_type_node, 1);
4154 tree int_256 = build_int_cst (integer_type_node, 256);
4155 region_model_manager mgr;
4156 test_region_model_context ctxt;
4157 const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
4158 const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
4159 const svalue *w_zero_then_one_sval
4160 = mgr.get_or_create_widening_svalue (integer_type_node, point,
4161 int_0_sval, int_1_sval);
4162 const widening_svalue *w_zero_then_one
4163 = w_zero_then_one_sval->dyn_cast_widening_svalue ();
4164 ASSERT_EQ (w_zero_then_one->get_direction (),
4165 widening_svalue::DIR_ASCENDING);
4166 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
4167 tristate::TS_FALSE);
4168 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
4169 tristate::TS_FALSE);
4170 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
4171 tristate::TS_UNKNOWN);
4172 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
4173 tristate::TS_UNKNOWN);
4174
4175 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
4176 tristate::TS_FALSE);
4177 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
4178 tristate::TS_UNKNOWN);
4179 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
4180 tristate::TS_UNKNOWN);
4181 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
4182 tristate::TS_UNKNOWN);
4183
4184 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
4185 tristate::TS_TRUE);
4186 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
4187 tristate::TS_UNKNOWN);
4188 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
4189 tristate::TS_UNKNOWN);
4190 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
4191 tristate::TS_UNKNOWN);
4192
4193 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
4194 tristate::TS_TRUE);
4195 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
4196 tristate::TS_TRUE);
4197 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
4198 tristate::TS_UNKNOWN);
4199 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
4200 tristate::TS_UNKNOWN);
4201
4202 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
4203 tristate::TS_FALSE);
4204 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
4205 tristate::TS_UNKNOWN);
4206 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
4207 tristate::TS_UNKNOWN);
4208 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
4209 tristate::TS_UNKNOWN);
4210
4211 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
4212 tristate::TS_TRUE);
4213 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
4214 tristate::TS_UNKNOWN);
4215 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
4216 tristate::TS_UNKNOWN);
4217 ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
4218 tristate::TS_UNKNOWN);
4219}
4220
4221/* Verify merging constraints for states simulating successive iterations
4222 of a loop.
4223 Simulate:
4224 for (i = 0; i < 256; i++)
4225 [...body...]
4226 i.e. this gimple:.
4227 i_15 = 0;
4228 goto <bb 4>;
4229
4230 <bb 4> :
4231 i_11 = PHI <i_15(2), i_23(3)>
4232 if (i_11 <= 255)
4233 goto <bb 3>;
4234 else
4235 goto [AFTER LOOP]
4236
4237 <bb 3> :
4238 [LOOP BODY]
4239 i_23 = i_11 + 1;
4240
4241 and thus these ops (and resultant states):
4242 i_11 = PHI()
4243 {i_11: 0}
4244 add_constraint (i_11 <= 255) [for the true edge]
4245 {i_11: 0} [constraint was a no-op]
4246 i_23 = i_11 + 1;
4247 {i_22: 1}
4248 i_11 = PHI()
4249 {i_11: WIDENED (at phi, 0, 1)}
4250 add_constraint (i_11 <= 255) [for the true edge]
4251 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
4252 i_23 = i_11 + 1;
4253 {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
4254 i_11 = PHI(); merge with state at phi above
4255 {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
4256 [changing meaning of "WIDENED" here]
4257 if (i_11 <= 255)
4258 T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
4259 F: {i_11: 256}
4260 */
4261
4262static void
4263test_iteration_1 ()
4264{
4265 program_point point (program_point::origin ());
4266
4267 tree int_0 = build_int_cst (integer_type_node, 0);
4268 tree int_1 = build_int_cst (integer_type_node, 1);
4269 tree int_256 = build_int_cst (integer_type_node, 256);
4270 tree int_257 = build_int_cst (integer_type_node, 257);
4271 tree i = build_global_decl ("i", integer_type_node);
4272
4273 region_model_manager mgr;
4274 test_region_model_context ctxt;
4275
4276 /* model0: i: 0. */
4277 region_model model0 (&mgr);
4278 model0.set_value (i, int_0, &ctxt);
4279
4280 /* model1: i: 1. */
4281 region_model model1 (&mgr);
4282 model1.set_value (i, int_1, &ctxt);
4283
4284 /* Should merge "i" to a widened value. */
4285 region_model model2 (&mgr);
4286 ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
4287 const svalue *merged_i = model2.get_rvalue (i, &ctxt);
4288 ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
4289 const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
4290 ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);
4291
4292 /* Add constraint: i < 256 */
4293 model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
4294 ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
4295 tristate (tristate::TS_TRUE));
4296 ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
4297 tristate (tristate::TS_TRUE));
4298
4299 /* Try merging with the initial state. */
4300 region_model model3 (&mgr);
4301 ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
4302 /* Merging the merged value with the initial value should be idempotent,
4303 so that the analysis converges. */
4304 ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
4305 /* Merger of 0 and a widening value with constraint < CST
4306 should retain the constraint, even though it was implicit
4307 for the 0 case. */
4308 ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
4309 tristate (tristate::TS_TRUE));
4310 /* ...and we should have equality: the analysis should have converged. */
4311 ASSERT_EQ (model3, model2);
4312
4313 /* "i_23 = i_11 + 1;" */
4314 region_model model4 (model3);
4315 ASSERT_EQ (model4, model2);
4316 model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
4317 const svalue *plus_one = model4.get_rvalue (i, &ctxt);
4318 ASSERT_EQ (plus_one->get_kind (), SK_BINOP);
4319
4320 /* Try merging with the "i: 1" state. */
4321 region_model model5 (&mgr);
4322 ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
4323 ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
4324 ASSERT_EQ (model5, model4);
4325
4326 /* "i_11 = PHI();" merge with state at phi above.
4327 For i, we should have a merger of WIDENING with WIDENING + 1,
4328 and this should be WIDENING again. */
4329 region_model model6 (&mgr);
4330 ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
4331 const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
4332 ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);
4333
4334 ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
4335}
4336
6969ac30
DM
4337/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
4338 all cast pointers to that region are also known to be non-NULL. */
4339
4340static void
4341test_malloc_constraints ()
4342{
808f4dfe
DM
4343 region_model_manager mgr;
4344 region_model model (&mgr);
6969ac30
DM
4345 tree p = build_global_decl ("p", ptr_type_node);
4346 tree char_star = build_pointer_type (char_type_node);
4347 tree q = build_global_decl ("q", char_star);
4348 tree null_ptr = build_int_cst (ptr_type_node, 0);
4349
808f4dfe
DM
4350 const svalue *size_in_bytes
4351 = mgr.get_or_create_unknown_svalue (integer_type_node);
4352 const region *reg = model.create_region_for_heap_alloc (size_in_bytes);
4353 const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
4354 model.set_value (model.get_lvalue (p, NULL), sval, NULL);
6969ac30
DM
4355 model.set_value (q, p, NULL);
4356
6969ac30
DM
4357 ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
4358 ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
4359 ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
4360 ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);
4361
4362 model.add_constraint (p, NE_EXPR, null_ptr, NULL);
4363
6969ac30
DM
4364 ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
4365 ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
4366 ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
4367 ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
4368}
4369
808f4dfe
DM
4370/* Smoketest of getting and setting the value of a variable. */
4371
4372static void
4373test_var ()
4374{
4375 /* "int i;" */
4376 tree i = build_global_decl ("i", integer_type_node);
4377
4378 tree int_17 = build_int_cst (integer_type_node, 17);
4379 tree int_m3 = build_int_cst (integer_type_node, -3);
4380
4381 region_model_manager mgr;
4382 region_model model (&mgr);
4383
4384 const region *i_reg = model.get_lvalue (i, NULL);
4385 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
4386
4387 /* Reading "i" should give a symbolic "initial value". */
4388 const svalue *sval_init = model.get_rvalue (i, NULL);
4389 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
4390 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
4391 /* ..and doing it again should give the same "initial value". */
4392 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
4393
4394 /* "i = 17;". */
4395 model.set_value (i, int_17, NULL);
4396 ASSERT_EQ (model.get_rvalue (i, NULL),
4397 model.get_rvalue (int_17, NULL));
4398
4399 /* "i = -3;". */
4400 model.set_value (i, int_m3, NULL);
4401 ASSERT_EQ (model.get_rvalue (i, NULL),
4402 model.get_rvalue (int_m3, NULL));
4403
4404 /* Verify get_offset for "i". */
4405 {
4406 region_offset offset = i_reg->get_offset ();
4407 ASSERT_EQ (offset.get_base_region (), i_reg);
4408 ASSERT_EQ (offset.get_bit_offset (), 0);
4409 }
4410}
4411
4412static void
4413test_array_2 ()
4414{
4415 /* "int arr[10];" */
4416 tree tlen = size_int (10);
4417 tree arr_type
4418 = build_array_type (integer_type_node, build_index_type (tlen));
4419 tree arr = build_global_decl ("arr", arr_type);
4420
4421 /* "int i;" */
4422 tree i = build_global_decl ("i", integer_type_node);
4423
4424 tree int_0 = build_int_cst (integer_type_node, 0);
4425 tree int_1 = build_int_cst (integer_type_node, 1);
4426
4427 tree arr_0 = build4 (ARRAY_REF, integer_type_node,
4428 arr, int_0, NULL_TREE, NULL_TREE);
4429 tree arr_1 = build4 (ARRAY_REF, integer_type_node,
4430 arr, int_1, NULL_TREE, NULL_TREE);
4431 tree arr_i = build4 (ARRAY_REF, integer_type_node,
4432 arr, i, NULL_TREE, NULL_TREE);
4433
4434 tree int_17 = build_int_cst (integer_type_node, 17);
4435 tree int_42 = build_int_cst (integer_type_node, 42);
4436 tree int_m3 = build_int_cst (integer_type_node, -3);
4437
4438 region_model_manager mgr;
4439 region_model model (&mgr);
4440 /* "arr[0] = 17;". */
4441 model.set_value (arr_0, int_17, NULL);
4442 /* "arr[1] = -3;". */
4443 model.set_value (arr_1, int_m3, NULL);
4444
4445 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
4446 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));
4447
4448 /* Overwrite a pre-existing binding: "arr[1] = 42;". */
4449 model.set_value (arr_1, int_42, NULL);
4450 ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));
4451
4452 /* Verify get_offset for "arr[0]". */
4453 {
4454 const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
4455 region_offset offset = arr_0_reg->get_offset ();
4456 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
4457 ASSERT_EQ (offset.get_bit_offset (), 0);
4458 }
4459
4460 /* Verify get_offset for "arr[1]". */
4461 {
4462 const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
4463 region_offset offset = arr_1_reg->get_offset ();
4464 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
4465 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
4466 }
4467
4468 /* "arr[i] = i;" - this should remove the earlier bindings. */
4469 model.set_value (arr_i, i, NULL);
4470 ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
4471 ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);
4472
4473 /* "arr[0] = 17;" - this should remove the arr[i] binding. */
4474 model.set_value (arr_0, int_17, NULL);
4475 ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
4476 ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
4477}
4478
4479/* Smoketest of dereferencing a pointer via MEM_REF. */
4480
4481static void
4482test_mem_ref ()
4483{
4484 /*
4485 x = 17;
4486 p = &x;
4487 *p;
4488 */
4489 tree x = build_global_decl ("x", integer_type_node);
4490 tree int_star = build_pointer_type (integer_type_node);
4491 tree p = build_global_decl ("p", int_star);
4492
4493 tree int_17 = build_int_cst (integer_type_node, 17);
4494 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
4495 tree offset_0 = build_int_cst (integer_type_node, 0);
4496 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
4497
4498 region_model_manager mgr;
4499 region_model model (&mgr);
4500
4501 /* "x = 17;". */
4502 model.set_value (x, int_17, NULL);
4503
4504 /* "p = &x;". */
4505 model.set_value (p, addr_of_x, NULL);
4506
4507 const svalue *sval = model.get_rvalue (star_p, NULL);
4508 ASSERT_EQ (sval->maybe_get_constant (), int_17);
4509}
4510
4511/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
4512 Analogous to this code:
4513 void test_6 (int a[10])
4514 {
4515 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
4516 a[3] = 42;
4517 __analyzer_eval (a[3] == 42); [should be TRUE]
4518 }
4519 from data-model-1.c, which looks like this at the gimple level:
4520 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
4521 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
4522 int _2 = *_1; # MEM_REF
4523 _Bool _3 = _2 == 42;
4524 int _4 = (int) _3;
4525 __analyzer_eval (_4);
4526
4527 # a[3] = 42;
4528 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
4529 *_5 = 42; # MEM_REF
4530
4531 # __analyzer_eval (a[3] == 42); [should be TRUE]
4532 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
4533 int _7 = *_6; # MEM_REF
4534 _Bool _8 = _7 == 42;
4535 int _9 = (int) _8;
4536 __analyzer_eval (_9); */
4537
4538static void
4539test_POINTER_PLUS_EXPR_then_MEM_REF ()
4540{
4541 tree int_star = build_pointer_type (integer_type_node);
4542 tree a = build_global_decl ("a", int_star);
4543 tree offset_12 = build_int_cst (size_type_node, 12);
4544 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
4545 tree offset_0 = build_int_cst (integer_type_node, 0);
4546 tree mem_ref = build2 (MEM_REF, integer_type_node,
4547 pointer_plus_expr, offset_0);
4548 region_model_manager mgr;
4549 region_model m (&mgr);
4550
4551 tree int_42 = build_int_cst (integer_type_node, 42);
4552 m.set_value (mem_ref, int_42, NULL);
4553 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
4554}
4555
4556/* Verify that malloc works. */
4557
4558static void
4559test_malloc ()
4560{
4561 tree int_star = build_pointer_type (integer_type_node);
4562 tree p = build_global_decl ("p", int_star);
4563 tree n = build_global_decl ("n", integer_type_node);
4564 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
4565 n, build_int_cst (size_type_node, 4));
4566
4567 region_model_manager mgr;
4568 test_region_model_context ctxt;
4569 region_model model (&mgr);
4570
4571 /* "p = malloc (n * 4);". */
4572 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
4573 const region *reg = model.create_region_for_heap_alloc (size_sval);
4574 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
4575 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
4576 // TODO: verify dynamic extents
4577}
4578
4579/* Verify that alloca works. */
4580
4581static void
4582test_alloca ()
4583{
4584 auto_vec <tree> param_types;
4585 tree fndecl = make_fndecl (integer_type_node,
4586 "test_fn",
4587 param_types);
4588 allocate_struct_function (fndecl, true);
4589
4590
4591 tree int_star = build_pointer_type (integer_type_node);
4592 tree p = build_global_decl ("p", int_star);
4593 tree n = build_global_decl ("n", integer_type_node);
4594 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
4595 n, build_int_cst (size_type_node, 4));
4596
4597 region_model_manager mgr;
4598 test_region_model_context ctxt;
4599 region_model model (&mgr);
4600
4601 /* Push stack frame. */
4602 const region *frame_reg
4603 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
4604 NULL, &ctxt);
4605 /* "p = alloca (n * 4);". */
4606 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
4607 const region *reg = model.create_region_for_alloca (size_sval);
4608 ASSERT_EQ (reg->get_parent_region (), frame_reg);
4609 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
4610 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
4611 // TODO: verify dynamic extents
4612
4613 /* Verify that the pointers to the alloca region are replaced by
4614 poisoned values when the frame is popped. */
4615 model.pop_frame (NULL, NULL, &ctxt);
4616 ASSERT_EQ (model.get_rvalue (p, &ctxt)->get_kind (), SK_POISONED);
4617}
4618
757bf1df
DM
4619/* Run all of the selftests within this file. */
4620
4621void
4622analyzer_region_model_cc_tests ()
4623{
8c08c983 4624 test_tree_cmp_on_constants ();
757bf1df 4625 test_dump ();
808f4dfe
DM
4626 test_struct ();
4627 test_array_1 ();
90f7c300 4628 test_get_representative_tree ();
757bf1df 4629 test_unique_constants ();
808f4dfe
DM
4630 test_unique_unknowns ();
4631 test_initial_svalue_folding ();
4632 test_unaryop_svalue_folding ();
4633 test_binop_svalue_folding ();
4634 test_sub_svalue_folding ();
4635 test_descendent_of_p ();
757bf1df 4636 test_assignment ();
a96f1c38 4637 test_compound_assignment ();
757bf1df
DM
4638 test_stack_frames ();
4639 test_get_representative_path_var ();
808f4dfe 4640 test_equality_1 ();
757bf1df
DM
4641 test_canonicalization_2 ();
4642 test_canonicalization_3 ();
8c08c983 4643 test_canonicalization_4 ();
757bf1df
DM
4644 test_state_merging ();
4645 test_constraint_merging ();
808f4dfe
DM
4646 test_widening_constraints ();
4647 test_iteration_1 ();
6969ac30 4648 test_malloc_constraints ();
808f4dfe
DM
4649 test_var ();
4650 test_array_2 ();
4651 test_mem_ref ();
4652 test_POINTER_PLUS_EXPR_then_MEM_REF ();
4653 test_malloc ();
4654 test_alloca ();
757bf1df
DM
4655}
4656
4657} // namespace selftest
4658
4659#endif /* CHECKING_P */
4660
75038aa6
DM
4661} // namespace ana
4662
757bf1df 4663#endif /* #if ENABLE_ANALYZER */