]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/analyzer/region-model.cc
libgccjit: Update comments for gcc_jit_context_new_rvalue_from* functions
[thirdparty/gcc.git] / gcc / analyzer / region-model.cc
CommitLineData
757bf1df
DM
1/* Classes for modeling the state of memory.
2 Copyright (C) 2019-2020 Free Software Foundation, Inc.
3 Contributed by David Malcolm <dmalcolm@redhat.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful, but
13WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tree.h"
25#include "function.h"
26#include "basic-block.h"
27#include "gimple.h"
28#include "gimple-iterator.h"
7892ff37 29#include "diagnostic-core.h"
757bf1df
DM
30#include "graphviz.h"
31#include "options.h"
32#include "cgraph.h"
33#include "tree-dfa.h"
34#include "stringpool.h"
35#include "convert.h"
36#include "target.h"
37#include "fold-const.h"
38#include "tree-pretty-print.h"
39#include "diagnostic-color.h"
40#include "diagnostic-metadata.h"
757bf1df 41#include "tristate.h"
ef7827b0 42#include "bitmap.h"
757bf1df
DM
43#include "selftest.h"
44#include "function.h"
45#include "analyzer/analyzer.h"
46#include "analyzer/analyzer-logging.h"
47#include "ordered-hash-map.h"
48#include "options.h"
49#include "cgraph.h"
50#include "cfg.h"
51#include "digraph.h"
52#include "analyzer/supergraph.h"
53#include "sbitmap.h"
808f4dfe
DM
54#include "analyzer/call-string.h"
55#include "analyzer/program-point.h"
56#include "analyzer/store.h"
757bf1df
DM
57#include "analyzer/region-model.h"
58#include "analyzer/constraint-manager.h"
59#include "diagnostic-event-id.h"
60#include "analyzer/sm.h"
61#include "diagnostic-event-id.h"
62#include "analyzer/sm.h"
63#include "analyzer/pending-diagnostic.h"
808f4dfe 64#include "analyzer/region-model-reachability.h"
757bf1df 65#include "analyzer/analyzer-selftests.h"
884d9141 66#include "stor-layout.h"
757bf1df
DM
67
68#if ENABLE_ANALYZER
69
75038aa6
DM
70namespace ana {
71
757bf1df
DM
72/* Dump T to PP in language-independent form, for debugging/logging/dumping
73 purposes. */
74
757bf1df 75void
808f4dfe 76dump_tree (pretty_printer *pp, tree t)
757bf1df 77{
808f4dfe 78 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
757bf1df
DM
79}
80
808f4dfe
DM
81/* Dump T to PP in language-independent form in quotes, for
82 debugging/logging/dumping purposes. */
757bf1df
DM
83
84void
808f4dfe 85dump_quoted_tree (pretty_printer *pp, tree t)
757bf1df 86{
808f4dfe
DM
87 pp_begin_quote (pp, pp_show_color (pp));
88 dump_tree (pp, t);
89 pp_end_quote (pp, pp_show_color (pp));
757bf1df
DM
90}
91
808f4dfe
DM
92/* Equivalent to pp_printf (pp, "%qT", t), to avoid nesting pp_printf
93 calls within other pp_printf calls.
757bf1df 94
808f4dfe
DM
95 default_tree_printer handles 'T' and some other codes by calling
96 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
97 dump_generic_node calls pp_printf in various places, leading to
98 garbled output.
757bf1df 99
808f4dfe
DM
100 Ideally pp_printf could be made to be reentrant, but in the meantime
101 this function provides a workaround. */
6969ac30
DM
102
103void
808f4dfe 104print_quoted_type (pretty_printer *pp, tree t)
6969ac30 105{
808f4dfe
DM
106 pp_begin_quote (pp, pp_show_color (pp));
107 dump_generic_node (pp, t, 0, TDF_SLIM, 0);
108 pp_end_quote (pp, pp_show_color (pp));
6969ac30
DM
109}
110
757bf1df
DM
111/* class region_model. */
112
808f4dfe 113/* Ctor for region_model: construct an "empty" model. */
757bf1df 114
808f4dfe
DM
115region_model::region_model (region_model_manager *mgr)
116: m_mgr (mgr), m_store (), m_current_frame (NULL)
757bf1df 117{
808f4dfe 118 m_constraints = new constraint_manager (mgr);
757bf1df
DM
119}
120
/* region_model's copy ctor.
   The store and frame pointer are copied directly; the constraint
   manager is deep-copied since this model owns it.  */

region_model::region_model (const region_model &other)
: m_mgr (other.m_mgr), m_store (other.m_store),
  m_constraints (new constraint_manager (*other.m_constraints)),
  m_current_frame (other.m_current_frame)
{
}
129
/* region_model's dtor.  Releases the owned constraint manager
   (see the ctors).  */

region_model::~region_model ()
{
  delete m_constraints;
}
136
137/* region_model's assignment operator. */
138
139region_model &
140region_model::operator= (const region_model &other)
141{
808f4dfe
DM
142 /* m_mgr is const. */
143 gcc_assert (m_mgr == other.m_mgr);
757bf1df 144
808f4dfe 145 m_store = other.m_store;
757bf1df
DM
146
147 delete m_constraints;
808f4dfe 148 m_constraints = new constraint_manager (*other.m_constraints);
757bf1df 149
808f4dfe 150 m_current_frame = other.m_current_frame;
757bf1df
DM
151
152 return *this;
153}
154
155/* Equality operator for region_model.
156
808f4dfe
DM
157 Amongst other things this directly compares the stores and the constraint
158 managers, so for this to be meaningful both this and OTHER should
757bf1df
DM
159 have been canonicalized. */
160
161bool
162region_model::operator== (const region_model &other) const
163{
808f4dfe
DM
164 /* We can only compare instances that use the same manager. */
165 gcc_assert (m_mgr == other.m_mgr);
757bf1df 166
808f4dfe 167 if (m_store != other.m_store)
757bf1df
DM
168 return false;
169
170 if (*m_constraints != *other.m_constraints)
171 return false;
172
808f4dfe
DM
173 if (m_current_frame != other.m_current_frame)
174 return false;
757bf1df
DM
175
176 gcc_checking_assert (hash () == other.hash ());
177
178 return true;
179}
180
181/* Generate a hash value for this region_model. */
182
183hashval_t
808f4dfe
DM
184region_model::hash () const
185{
186 hashval_t result = m_store.hash ();
187 result ^= m_constraints->hash ();
188 return result;
757bf1df
DM
189}
190
808f4dfe
DM
/* Dump a representation of this model to PP, showing the
   stack, the store, and any constraints.
   Use SIMPLE to control how svalues and regions are printed.
   If MULTILINE, each part goes on its own line(s); otherwise the
   parts are emitted as brace-delimited groups on a single line.  */

void
region_model::dump_to_pp (pretty_printer *pp, bool simple,
			  bool multiline) const
{
  /* Dump stack, innermost frame first.  */
  pp_printf (pp, "stack depth: %i", get_stack_depth ());
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  for (const frame_region *iter_frame = m_current_frame; iter_frame;
       iter_frame = iter_frame->get_calling_frame ())
    {
      if (multiline)
	pp_string (pp, " ");
      else if (iter_frame != m_current_frame)
	pp_string (pp, ", ");
      pp_printf (pp, "frame (index %i): ", iter_frame->get_index ());
      iter_frame->dump_to_pp (pp, simple);
      if (multiline)
	pp_newline (pp);
    }
  if (!multiline)
    pp_string (pp, "}");

  /* Dump store.  */
  if (!multiline)
    pp_string (pp, ", {");
  m_store.dump_to_pp (pp, simple, multiline,
		      m_mgr->get_store_manager ());
  if (!multiline)
    pp_string (pp, "}");

  /* Dump constraints.  */
  pp_string (pp, "constraint_manager:");
  if (multiline)
    pp_newline (pp);
  else
    pp_string (pp, " {");
  m_constraints->dump_to_pp (pp, multiline);
  if (!multiline)
    pp_string (pp, "}");
}
757bf1df 238
808f4dfe 239/* Dump a representation of this model to FILE. */
757bf1df 240
808f4dfe
DM
241void
242region_model::dump (FILE *fp, bool simple, bool multiline) const
243{
244 pretty_printer pp;
245 pp_format_decoder (&pp) = default_tree_printer;
246 pp_show_color (&pp) = pp_show_color (global_dc->printer);
247 pp.buffer->stream = fp;
248 dump_to_pp (&pp, simple, multiline);
249 pp_newline (&pp);
250 pp_flush (&pp);
757bf1df
DM
251}
252
808f4dfe 253/* Dump a multiline representation of this model to stderr. */
757bf1df 254
808f4dfe
DM
255DEBUG_FUNCTION void
256region_model::dump (bool simple) const
257{
258 dump (stderr, simple, true);
259}
757bf1df 260
808f4dfe 261/* Dump a multiline representation of this model to stderr. */
757bf1df 262
808f4dfe
DM
263DEBUG_FUNCTION void
264region_model::debug () const
757bf1df 265{
808f4dfe 266 dump (true);
757bf1df
DM
267}
268
808f4dfe
DM
269/* Canonicalize the store and constraints, to maximize the chance of
270 equality between region_model instances. */
757bf1df
DM
271
272void
808f4dfe 273region_model::canonicalize ()
757bf1df 274{
808f4dfe
DM
275 m_store.canonicalize (m_mgr->get_store_manager ());
276 m_constraints->canonicalize ();
757bf1df
DM
277}
278
279/* Return true if this region_model is in canonical form. */
280
281bool
282region_model::canonicalized_p () const
283{
284 region_model copy (*this);
808f4dfe 285 copy.canonicalize ();
757bf1df
DM
286 return *this == copy;
287}
288
808f4dfe
DM
/* See the comment for store::loop_replay_fixup.
   Delegates to the store, fixing up this model's bindings relative
   to DST_STATE's store.  */

void
region_model::loop_replay_fixup (const region_model *dst_state)
{
  m_store.loop_replay_fixup (dst_state->get_store (), m_mgr);
}
296
757bf1df
DM
/* A subclass of pending_diagnostic for complaining about uses of
   poisoned values (use-after-free, and use of values within a
   popped stack frame).  */

class poisoned_value_diagnostic
: public pending_diagnostic_subclass<poisoned_value_diagnostic>
{
public:
  poisoned_value_diagnostic (tree expr, enum poison_kind pkind)
  : m_expr (expr), m_pkind (pkind)
  {}

  const char *get_kind () const FINAL OVERRIDE { return "poisoned_value_diagnostic"; }

  /* NOTE: deduplication compares only the expression, not the poison
     kind.  */
  bool operator== (const poisoned_value_diagnostic &other) const
  {
    return m_expr == other.m_expr;
  }

  /* Emit the warning appropriate to the poison kind.  Returns true if
     a diagnostic was actually emitted.  */
  bool emit (rich_location *rich_loc) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_FREED:
	{
	  diagnostic_metadata m;
	  m.add_cwe (416); /* "CWE-416: Use After Free".  */
	  return warning_meta (rich_loc, m,
			       OPT_Wanalyzer_use_after_free,
			       "use after %<free%> of %qE",
			       m_expr);
	}
	break;
      case POISON_KIND_POPPED_STACK:
	{
	  /* TODO: which CWE?  */
	  return warning_at
	    (rich_loc,
	     OPT_Wanalyzer_use_of_pointer_in_stale_stack_frame,
	     "dereferencing pointer %qE to within stale stack frame",
	     m_expr);
	}
	break;
      }
  }

  /* Describe the final event in the diagnostic path, matching the
     wording of the corresponding warning in emit above.  */
  label_text describe_final_event (const evdesc::final_event &ev) FINAL OVERRIDE
  {
    switch (m_pkind)
      {
      default:
	gcc_unreachable ();
      case POISON_KIND_FREED:
	return ev.formatted_print ("use after %<free%> of %qE here",
				   m_expr);
      case POISON_KIND_POPPED_STACK:
	return ev.formatted_print
	  ("dereferencing pointer %qE to within stale stack frame",
	   m_expr);
      }
  }

private:
  tree m_expr;               /* The expression whose value is poisoned.  */
  enum poison_kind m_pkind;  /* How the value became poisoned.  */
};
364
808f4dfe
DM
/* If ASSIGN is a stmt that can be modelled via
     set_value (lhs_reg, SVALUE, CTXT)
   for some SVALUE, get the SVALUE.
   Otherwise return NULL (e.g. for tree codes needing special-case
   handling in on_assignment, such as CONSTRUCTOR and STRING_CST).  */

const svalue *
region_model::get_gassign_result (const gassign *assign,
				  region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      /* Not expressible as a single svalue; caller must handle.  */
      return NULL;

    case POINTER_PLUS_EXPR:
      {
	/* e.g. "_1 = a_10(D) + 12;" */
	tree ptr = rhs1;
	tree offset = gimple_assign_rhs2 (assign);

	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Quoting tree.def, "the second operand [of a POINTER_PLUS_EXPR]
	   is an integer of type sizetype".  */
	offset_sval = m_mgr->get_or_create_cast (size_type_node, offset_sval);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					ptr_sval, offset_sval);
	return sval_binop;
      }
      break;

    case POINTER_DIFF_EXPR:
      {
	/* e.g. "_1 = p_2(D) - q_3(D);".  */
	tree rhs2 = gimple_assign_rhs2 (assign);
	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	// TODO: perhaps fold to zero if they're known to be equal?

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    /* Assignments of the form
	 set_value (lvalue (LHS), rvalue (EXPR))
       for various EXPR.
       We already have the lvalue for the LHS above, as "lhs_reg".  */
    case ADDR_EXPR: /* LHS = &RHS;  */
    case BIT_FIELD_REF:
    case COMPONENT_REF: /* LHS = op0.op1;  */
    case MEM_REF:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case INTEGER_CST:
    case ARRAY_REF:
    case SSA_NAME: /* LHS = VAR; */
    case VAR_DECL: /* LHS = VAR; */
    case PARM_DECL:/* LHS = VAR; */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return get_rvalue (rhs1, ctxt);

    case ABS_EXPR:
    case ABSU_EXPR:
    case CONJ_EXPR:
    case BIT_NOT_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case NOP_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	/* Unary ops.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (lhs), op, rhs_sval);
	return sval_unaryop;
      }

    case EQ_EXPR:
    case GE_EXPR:
    case LE_EXPR:
    case NE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	/* Comparisons.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	if (TREE_TYPE (lhs) == boolean_type_node)
	  {
	    /* Consider constraints between svalues: if the condition's
	       truth value is already known, fold to a constant.  */
	    tristate t = eval_condition (rhs1_sval, op, rhs2_sval);
	    if (t.is_known ())
	      return m_mgr->get_or_create_constant_svalue
		(t.is_true () ? boolean_true_node : boolean_false_node);
	  }

	/* Otherwise, generate a symbolic binary op.  */
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case COMPLEX_EXPR:
      {
	/* Binary ops.  */
	tree rhs2 = gimple_assign_rhs2 (assign);

	const svalue *rhs1_sval = get_rvalue (rhs1, ctxt);
	const svalue *rhs2_sval = get_rvalue (rhs2, ctxt);

	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (lhs), op,
					rhs1_sval, rhs2_sval);
	return sval_binop;
      }

    /* Vector expressions.  In theory we could implement these elementwise,
       but for now, simply return unknown values.  */
    case VEC_DUPLICATE_EXPR:
    case VEC_SERIES_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
    }
}
547
/* Update this model for the ASSIGN stmt, using CTXT to report any
   diagnostics.  */

void
region_model::on_assignment (const gassign *assign, region_model_context *ctxt)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);

  const region *lhs_reg = get_lvalue (lhs, ctxt);

  /* Most assignments are handled by:
       set_value (lhs_reg, SVALUE, CTXT)
     for some SVALUE.  */
  if (const svalue *sval = get_gassign_result (assign, ctxt))
    {
      set_value (lhs_reg, sval, ctxt);
      return;
    }

  /* Fall back to special-casing the rhs codes that
     get_gassign_result doesn't express as a single svalue.  */
  enum tree_code op = gimple_assign_rhs_code (assign);
  switch (op)
    {
    default:
      {
	/* Deliberately disabled diagnostic (flip "if (0)" to debug
	   which tree codes fall through to the unknown-value case).  */
	if (0)
	  sorry_at (assign->location, "unhandled assignment op: %qs",
		    get_tree_code_name (op));
	const svalue *unknown_sval
	  = m_mgr->get_or_create_unknown_svalue (TREE_TYPE (lhs));
	set_value (lhs_reg, unknown_sval, ctxt);
      }
      break;

    case CONSTRUCTOR:
      {
	if (TREE_CLOBBER_P (rhs1))
	  {
	    /* e.g. "x ={v} {CLOBBER};"  */
	    clobber_region (lhs_reg);
	  }
	else
	  {
	    /* Any CONSTRUCTOR that survives to this point is either
	       just a zero-init of everything, or a vector.  */
	    if (!CONSTRUCTOR_NO_CLEARING (rhs1))
	      zero_fill_region (lhs_reg);
	    unsigned ix;
	    tree index;
	    tree val;
	    /* Bind each explicitly-initialized vector element,
	       synthesizing an index from the position when the
	       constructor omits one.  */
	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), ix, index, val)
	      {
		gcc_assert (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE);
		if (!index)
		  index = build_int_cst (integer_type_node, ix);
		gcc_assert (TREE_CODE (index) == INTEGER_CST);
		const svalue *index_sval
		  = m_mgr->get_or_create_constant_svalue (index);
		gcc_assert (index_sval);
		const region *sub_reg
		  = m_mgr->get_element_region (lhs_reg,
					       TREE_TYPE (val),
					       index_sval);
		const svalue *val_sval = get_rvalue (val, ctxt);
		set_value (sub_reg, val_sval, ctxt);
	      }
	  }
      }
      break;

    case STRING_CST:
      {
	/* e.g. "struct s2 x = {{'A', 'B', 'C', 'D'}};".  */
	/* Add a default binding, rather than a direct one, so that array
	   access will "inherit" the individual chars.  */
	const svalue *rhs_sval = get_rvalue (rhs1, ctxt);
	m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
			   BK_default);
      }
      break;
    }
}
630
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the first half.

   Updates to the region_model that should be made *before* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_post.

   Return true if the function call has unknown side effects (it wasn't
   recognized and we don't have a body for it, or are unable to tell which
   fndecl it is).  */

bool
region_model::on_call_pre (const gcall *call, region_model_context *ctxt)
{
  bool unknown_side_effects = false;

  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    {
      call_details cd (call, this, ctxt);

      /* The various impl_call_* member functions are implemented
	 in region-model-impl-calls.cc.
	 Having them split out into separate functions makes it easier
	 to put breakpoints on the handling of specific functions.  */
      if (is_named_call_p (callee_fndecl, "malloc", call, 1))
	return impl_call_malloc (cd);
      else if (is_named_call_p (callee_fndecl, "calloc", call, 2))
	return impl_call_calloc (cd);
      else if (is_named_call_p (callee_fndecl, "__builtin_alloca", call, 1))
	return impl_call_alloca (cd);
      else if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
	       || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
	       || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
	return impl_call_builtin_expect (cd);
      else if (is_named_call_p (callee_fndecl, "memset", call, 3)
	       || gimple_call_builtin_p (call, BUILT_IN_MEMSET))
	{
	  impl_call_memset (cd);
	  return false;
	}
      else if (is_named_call_p (callee_fndecl, "strlen", call, 1))
	{
	  /* If impl_call_strlen handled the lhs, we're done; otherwise
	     fall through to the generic lhs handling below.  */
	  if (impl_call_strlen (cd))
	    return false;
	}
      else if (!fndecl_has_gimple_body_p (callee_fndecl)
	       && !DECL_PURE_P (callee_fndecl)
	       && !fndecl_built_in_p (callee_fndecl))
	unknown_side_effects = true;
    }
  else
    unknown_side_effects = true;

  /* Some of the above cases update the lhs of the call based on the
     return value.  If we get here, it hasn't been done yet, so do that
     now.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      const region *lhs_region = get_lvalue (lhs, ctxt);
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Bind the lhs to a fresh "initial value" of its region,
	     i.e. a conjured/unknown-but-consistent value.  */
	  const svalue *sval = m_mgr->get_or_create_initial_value (lhs_region);
	  set_value (lhs_region, sval, ctxt);
	}
    }

  return unknown_side_effects;
}
699
/* Update this model for the CALL stmt, using CTXT to report any
   diagnostics - the second half.

   Updates to the region_model that should be made *after* sm-states
   are updated are done here; other updates to the region_model are done
   in region_model::on_call_pre.

   If UNKNOWN_SIDE_EFFECTS is true, also call handle_unrecognized_call
   to purge state.  */

void
region_model::on_call_post (const gcall *call,
			    bool unknown_side_effects,
			    region_model_context *ctxt)
{
  /* Special-case "free" here (rather than in on_call_pre), so that
     the sm-state updates happen before the region model changes;
     impl_call_free is implemented in region-model-impl-calls.cc.  */
  if (tree callee_fndecl = get_fndecl_for_call (call, ctxt))
    if (is_named_call_p (callee_fndecl, "free", call, 1))
      {
	call_details cd (call, this, ctxt);
	impl_call_free (cd);
	return;
      }

  if (unknown_side_effects)
    handle_unrecognized_call (call, ctxt);
}
726
ef7827b0
DM
/* Handle a call CALL to a function with unknown behavior.

   Traverse the regions in this model, determining what regions are
   reachable from pointer arguments to CALL and from global variables,
   recursively.

   Set all reachable regions to new unknown values and purge sm-state
   from their values, and from values that point to them.

   NOTE(review): CTXT is dereferenced unconditionally below
   (ctxt->on_unknown_change); callers appear to be expected to pass a
   non-NULL context here — confirm against call sites.  */

void
region_model::handle_unrecognized_call (const gcall *call,
					region_model_context *ctxt)
{
  tree fndecl = get_fndecl_for_call (call, ctxt);

  reachable_regions reachable_regs (&m_store, m_mgr);

  /* Determine the reachable regions and their mutability.  */
  {
    /* Add globals and regions that already escaped in previous
       unknown calls.  */
    m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			      &reachable_regs);

    /* Params that are pointers.  */
    tree iter_param_types = NULL_TREE;
    if (fndecl)
      iter_param_types = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
    for (unsigned arg_idx = 0; arg_idx < gimple_call_num_args (call); arg_idx++)
      {
	/* Track expected param type, where available.  */
	tree param_type = NULL_TREE;
	if (iter_param_types)
	  {
	    param_type = TREE_VALUE (iter_param_types);
	    gcc_assert (param_type);
	    iter_param_types = TREE_CHAIN (iter_param_types);
	  }

	tree parm = gimple_call_arg (call, arg_idx);
	const svalue *parm_sval = get_rvalue (parm, ctxt);
	reachable_regs.handle_parm (parm_sval, param_type);
      }
  }

  /* Purge sm-state for the svalues that were reachable,
     both in non-mutable and mutable form.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      ctxt->on_unknown_change (sval, false);
    }
  for (svalue_set::iterator iter
	 = reachable_regs.begin_mutable_svals ();
       iter != reachable_regs.end_mutable_svals (); ++iter)
    {
      const svalue *sval = (*iter);
      ctxt->on_unknown_change (sval, true);
    }

  /* Mark any clusters that have escaped.  */
  reachable_regs.mark_escaped_clusters ();

  /* Update bindings for all clusters that have escaped, whether above,
     or previously.  */
  m_store.on_unknown_fncall (call, m_mgr->get_store_manager ());
}
ef7827b0 796
808f4dfe
DM
/* Traverse the regions in this model, determining what regions are
   reachable from the store and populating *OUT.

   If EXTRA_SVAL is non-NULL, treat it as an additional "root"
   for reachability (for handling return values from functions when
   analyzing return of the only function on the stack).

   Find svalues that haven't leaked.  */

void
region_model::get_reachable_svalues (svalue_set *out,
				     const svalue *extra_sval)
{
  reachable_regions reachable_regs (&m_store, m_mgr);

  /* Add globals and regions that already escaped in previous
     unknown calls.  */
  m_store.for_each_cluster (reachable_regions::init_cluster_cb,
			    &reachable_regs);

  if (extra_sval)
    reachable_regs.handle_sval (extra_sval);

  /* Get regions for locals that have explicitly bound values:
     any base region whose parent is a frame is a local.  */
  for (store::cluster_map_t::iterator iter = m_store.begin ();
       iter != m_store.end (); ++iter)
    {
      const region *base_reg = (*iter).first;
      if (const region *parent = base_reg->get_parent_region ())
	if (parent->get_kind () == RK_FRAME)
	  reachable_regs.add (base_reg, false);
    }

  /* Populate *OUT based on the values that were reachable.  */
  for (svalue_set::iterator iter
	 = reachable_regs.begin_reachable_svals ();
       iter != reachable_regs.end_reachable_svals (); ++iter)
    out->add (*iter);
}
836
837/* Update this model for the RETURN_STMT, using CTXT to report any
838 diagnostics. */
839
840void
841region_model::on_return (const greturn *return_stmt, region_model_context *ctxt)
842{
843 tree callee = get_current_function ()->decl;
844 tree lhs = DECL_RESULT (callee);
845 tree rhs = gimple_return_retval (return_stmt);
846
847 if (lhs && rhs)
a96f1c38 848 copy_region (get_lvalue (lhs, ctxt), get_lvalue (rhs, ctxt), ctxt);
757bf1df
DM
849}
850
342e14ff
DM
/* Update this model for a call and return of setjmp/sigsetjmp at CALL within
   ENODE, using CTXT to report any diagnostics.

   This is for the initial direct invocation of setjmp/sigsetjmp (which returns
   0), as opposed to any second return due to longjmp/sigsetjmp.  */

void
region_model::on_setjmp (const gcall *call, const exploded_node *enode,
			 region_model_context *ctxt)
{
  /* Dereference the jmp_buf argument.  */
  const svalue *buf_ptr = get_rvalue (gimple_call_arg (call, 0), ctxt);
  const region *buf_reg = deref_rvalue (buf_ptr, gimple_call_arg (call, 0),
					ctxt);

  /* Create a setjmp_svalue for this call and store it in BUF_REG's
     region, recording the exploded node/call so that a later longjmp
     can rewind to it.  */
  if (buf_reg)
    {
      setjmp_record r (enode, call);
      const svalue *sval
	= m_mgr->get_or_create_setjmp_svalue (r, buf_reg->get_type ());
      set_value (buf_reg, sval, ctxt);
    }

  /* Direct calls to setjmp return 0.  */
  if (tree lhs = gimple_call_lhs (call))
    {
      tree zero = build_int_cst (TREE_TYPE (lhs), 0);
      const svalue *new_sval = m_mgr->get_or_create_constant_svalue (zero);
      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, new_sval, ctxt);
    }
}
884
/* Update this region_model for rewinding from a "longjmp" at LONGJMP_CALL
   to a "setjmp" at SETJMP_CALL where the final stack depth should be
   SETJMP_STACK_DEPTH.  Pop any stack frames.  Leak detection is *not*
   done, and should be done by the caller.  */

void
region_model::on_longjmp (const gcall *longjmp_call, const gcall *setjmp_call,
			  int setjmp_stack_depth, region_model_context *ctxt)
{
  /* Evaluate the val, using the frame of the "longjmp".  */
  tree fake_retval = gimple_call_arg (longjmp_call, 1);
  const svalue *fake_retval_sval = get_rvalue (fake_retval, ctxt);

  /* Pop any frames until we reach the stack depth of the function where
     setjmp was called.  */
  gcc_assert (get_stack_depth () >= setjmp_stack_depth);
  while (get_stack_depth () > setjmp_stack_depth)
    pop_frame (NULL, NULL, ctxt);

  gcc_assert (get_stack_depth () == setjmp_stack_depth);

  /* Assign to LHS of "setjmp" in new_state.  */
  if (tree lhs = gimple_call_lhs (setjmp_call))
    {
      /* Passing 0 as the val to longjmp leads to setjmp returning 1.  */
      tree t_zero = build_int_cst (TREE_TYPE (fake_retval), 0);
      const svalue *zero_sval = m_mgr->get_or_create_constant_svalue (t_zero);
      tristate eq_zero = eval_condition (fake_retval_sval, EQ_EXPR, zero_sval);
      /* If we have 0, use 1.  */
      if (eq_zero.is_true ())
	{
	  tree t_one = build_int_cst (TREE_TYPE (fake_retval), 1);
	  const svalue *one_sval
	    = m_mgr->get_or_create_constant_svalue (t_one);
	  fake_retval_sval = one_sval;
	}
      else
	{
	  /* Otherwise note that the value is nonzero.  */
	  m_constraints->add_constraint (fake_retval_sval, NE_EXPR, zero_sval);
	}

      /* Decorate the return value from setjmp as being unmergeable,
	 so that we don't attempt to merge states with it as zero
	 with states in which it's nonzero, leading to a clean distinction
	 in the exploded_graph between the first return and the second
	 return.  */
      fake_retval_sval = m_mgr->get_or_create_unmergeable (fake_retval_sval);

      const region *lhs_reg = get_lvalue (lhs, ctxt);
      set_value (lhs_reg, fake_retval_sval, ctxt);
    }
}
938
/* Update this region_model for a phi stmt of the form
     LHS = PHI <...RHS...>.
   where RHS is the argument for the in-edge being taken.
   Notifies CTXT (if non-NULL) of the phi for state-machine purposes.  */

void
region_model::handle_phi (const gphi *phi,
			  tree lhs, tree rhs,
			  region_model_context *ctxt)
{
  /* For now, don't bother tracking the .MEM SSA names
     (virtual operands).  */
  if (tree var = SSA_NAME_VAR (lhs))
    if (TREE_CODE (var) == VAR_DECL)
      if (VAR_DECL_IS_VIRTUAL_OPERAND (var))
	return;

  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  set_value (get_lvalue (lhs, ctxt), rhs_sval, ctxt);

  if (ctxt)
    ctxt->on_phi (phi, rhs);
}
961
/* Implementation of region_model::get_lvalue; the latter adds type-checking.

   Get the id of the region for PV within this region_model,
   emitting any diagnostics to CTXT.
   Tree codes not handled explicitly below get a placeholder region via
   get_region_for_unexpected_tree_code.  */

const region *
region_model::get_lvalue_1 (path_var pv, region_model_context *ctxt)
{
  tree expr = pv.m_tree;

  gcc_assert (expr);

  switch (TREE_CODE (expr))
    {
    default:
      return m_mgr->get_region_for_unexpected_tree_code (ctxt, expr,
							 dump_location_t ());

    case ARRAY_REF:
      {
	tree array = TREE_OPERAND (expr, 0);
	tree index = TREE_OPERAND (expr, 1);

	const region *array_reg = get_lvalue (array, ctxt);
	const svalue *index_sval = get_rvalue (index, ctxt);
	/* Element type is the type of the array's elements.  */
	return m_mgr->get_element_region (array_reg,
					  TREE_TYPE (TREE_TYPE (array)),
					  index_sval);
      }
      break;

    case MEM_REF:
      {
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	/* Dereference the pointer, then apply the byte offset.  */
	const region *star_ptr = deref_rvalue (ptr_sval, ptr, ctxt);
	return m_mgr->get_offset_region (star_ptr,
					 TREE_TYPE (expr),
					 offset_sval);
      }
      break;

    case FUNCTION_DECL:
      return m_mgr->get_region_for_fndecl (expr);

    case LABEL_DECL:
      return m_mgr->get_region_for_label (expr);

    case VAR_DECL:
      /* Handle globals.  */
      if (is_global_var (expr))
	return m_mgr->get_region_for_global (expr);

      /* Fall through.  */

    case SSA_NAME:
    case PARM_DECL:
    case RESULT_DECL:
      {
	gcc_assert (TREE_CODE (expr) == SSA_NAME
		    || TREE_CODE (expr) == PARM_DECL
		    || TREE_CODE (expr) == VAR_DECL
		    || TREE_CODE (expr) == RESULT_DECL);

	/* Locals live within the frame given by PV's stack depth.  */
	int stack_index = pv.m_stack_depth;
	const frame_region *frame = get_frame_at_index (stack_index);
	gcc_assert (frame);
	return frame->get_region_for_local (m_mgr, expr);
      }

    case COMPONENT_REF:
      {
	/* obj.field  */
	tree obj = TREE_OPERAND (expr, 0);
	tree field = TREE_OPERAND (expr, 1);
	const region *obj_reg = get_lvalue (obj, ctxt);
	return m_mgr->get_field_region (obj_reg, field);
      }
      break;

    case STRING_CST:
      return m_mgr->get_region_for_string (expr);
    }
}
1048
/* Assert that SRC_TYPE can be converted to DST_TYPE as a no-op.
   Only enforced in checking builds (CHECKING_P); NULL types and
   conversions to void are always accepted.  */

static void
assert_compat_types (tree src_type, tree dst_type)
{
  if (src_type && dst_type && !VOID_TYPE_P (dst_type))
    {
#if CHECKING_P
      if (!(useless_type_conversion_p (src_type, dst_type)))
	internal_error ("incompatible types: %qT and %qT", src_type, dst_type);
#endif
    }
}
757bf1df 1062
/* Get the region for PV within this region_model,
   emitting any diagnostics to CTXT.
   Returns NULL for a NULL_TREE path_var; otherwise delegates to
   get_lvalue_1 and verifies type compatibility.  */

const region *
region_model::get_lvalue (path_var pv, region_model_context *ctxt)
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const region *result_reg = get_lvalue_1 (pv, ctxt);
  assert_compat_types (result_reg->get_type (), TREE_TYPE (pv.m_tree));
  return result_reg;
}
1076
808f4dfe 1077/* Get the region for EXPR within this region_model (assuming the most
757bf1df
DM
1078 recent stack frame if it's a local). */
1079
808f4dfe 1080const region *
757bf1df
DM
1081region_model::get_lvalue (tree expr, region_model_context *ctxt)
1082{
1083 return get_lvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1084}
1085
/* Implementation of region_model::get_rvalue; the latter adds type-checking.

   Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.
   Unhandled tree codes conservatively yield an unknown svalue.  */

const svalue *
region_model::get_rvalue_1 (path_var pv, region_model_context *ctxt)
{
  gcc_assert (pv.m_tree);

  switch (TREE_CODE (pv.m_tree))
    {
    default:
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case ADDR_EXPR:
      {
	/* "&EXPR".  */
	tree expr = pv.m_tree;
	tree op0 = TREE_OPERAND (expr, 0);
	const region *expr_reg = get_lvalue (op0, ctxt);
	return m_mgr->get_ptr_svalue (TREE_TYPE (expr), expr_reg);
      }
      break;

    case BIT_FIELD_REF:
      /* Bit-fields aren't modeled yet; treat as unknown.  */
      return m_mgr->get_or_create_unknown_svalue (TREE_TYPE (pv.m_tree));

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case ARRAY_REF:
      {
	/* Read the current binding for the lvalue's region.  */
	const region *reg = get_lvalue (pv, ctxt);
	return get_store_value (reg);
      }

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg = TREE_OPERAND (expr, 0);
	const svalue *arg_sval = get_rvalue (arg, ctxt);
	const svalue *sval_unaryop
	  = m_mgr->get_or_create_unaryop (TREE_TYPE (expr), TREE_CODE (expr),
					  arg_sval);
	return sval_unaryop;
      };

    case INTEGER_CST:
    case REAL_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return m_mgr->get_or_create_constant_svalue (pv.m_tree);

    case POINTER_PLUS_EXPR:
      {
	tree expr = pv.m_tree;
	tree ptr = TREE_OPERAND (expr, 0);
	tree offset = TREE_OPERAND (expr, 1);
	const svalue *ptr_sval = get_rvalue (ptr, ctxt);
	const svalue *offset_sval = get_rvalue (offset, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), POINTER_PLUS_EXPR,
					ptr_sval, offset_sval);
	return sval_binop;
      }

    /* Binary ops.  */
    case PLUS_EXPR:
    case MULT_EXPR:
      {
	tree expr = pv.m_tree;
	tree arg0 = TREE_OPERAND (expr, 0);
	tree arg1 = TREE_OPERAND (expr, 1);
	const svalue *arg0_sval = get_rvalue (arg0, ctxt);
	const svalue *arg1_sval = get_rvalue (arg1, ctxt);
	const svalue *sval_binop
	  = m_mgr->get_or_create_binop (TREE_TYPE (expr), TREE_CODE (expr),
					arg0_sval, arg1_sval);
	return sval_binop;
      }

    case COMPONENT_REF:
    case MEM_REF:
      {
	const region *ref_reg = get_lvalue (pv, ctxt);
	return get_store_value (ref_reg);
      }
    }
}
1180
/* Get the value of PV within this region_model,
   emitting any diagnostics to CTXT.
   Returns NULL for a NULL_TREE path_var; otherwise delegates to
   get_rvalue_1 and verifies type compatibility.  */

const svalue *
region_model::get_rvalue (path_var pv, region_model_context *ctxt)
{
  if (pv.m_tree == NULL_TREE)
    return NULL;

  const svalue *result_sval = get_rvalue_1 (pv, ctxt);

  assert_compat_types (result_sval->get_type (), TREE_TYPE (pv.m_tree));

  return result_sval;
}
1196
1197/* Get the value of EXPR within this region_model (assuming the most
1198 recent stack frame if it's a local). */
1199
808f4dfe 1200const svalue *
757bf1df
DM
1201region_model::get_rvalue (tree expr, region_model_context *ctxt)
1202{
1203 return get_rvalue (path_var (expr, get_stack_depth () - 1), ctxt);
1204}
1205
623bc027
DM
/* Return true if this model is on a path with "main" as the entrypoint
   (as opposed to one in which we're merely analyzing a subset of the
   path through the code).
   Returns false when no stack frames exist yet.  */

bool
region_model::called_from_main_p () const
{
  if (!m_current_frame)
    return false;
  /* Determine if the oldest stack frame in this model is for "main".  */
  const frame_region *frame0 = get_frame_at_index (0);
  gcc_assert (frame0);
  return id_equal (DECL_NAME (frame0->get_function ()->decl), "main");
}
1220
/* Subroutine of region_model::get_store_value for when REG is (or is within)
   a global variable that hasn't been touched since the start of this path
   (or was implicitly touched due to a call to an unknown function).
   Requires REG's base region to be a decl_region.  */

const svalue *
region_model::get_initial_value_for_global (const region *reg) const
{
  /* Get the decl that REG is for (or is within).  */
  const decl_region *base_reg
    = reg->get_base_region ()->dyn_cast_decl_region ();
  gcc_assert (base_reg);
  tree decl = base_reg->get_decl ();

  /* Special-case: to avoid having to explicitly update all previously
     untracked globals when calling an unknown fn, they implicitly have
     an unknown value if an unknown call has occurred, unless this is
     static to-this-TU and hasn't escaped.  Globals that have escaped
     are explicitly tracked, so we shouldn't hit this case for them.  */
  if (m_store.called_unknown_fn_p () && TREE_PUBLIC (decl))
    return m_mgr->get_or_create_unknown_svalue (reg->get_type ());

  /* If we are on a path from the entrypoint from "main" and we have a
     global decl defined in this TU that hasn't been touched yet, then
     the initial value of REG can be taken from the initialization value
     of the decl.  */
  if (called_from_main_p () && !DECL_EXTERNAL (decl))
    {
      /* Get the initializer value for base_reg.  */
      const svalue *base_reg_init
	= base_reg->get_svalue_for_initializer (m_mgr);
      gcc_assert (base_reg_init);
      if (reg == base_reg)
	return base_reg_init;
      else
	{
	  /* Get the value for REG within base_reg_init, by binding the
	     initializer into a scratch cluster and reading REG from it.  */
	  binding_cluster c (base_reg);
	  c.bind (m_mgr->get_store_manager (), base_reg, base_reg_init,
		  BK_direct);
	  const svalue *sval
	    = c.get_any_binding (m_mgr->get_store_manager (), reg);
	  if (sval)
	    {
	      if (reg->get_type ())
		sval = m_mgr->get_or_create_cast (reg->get_type (),
						  sval);
	      return sval;
	    }
	}
    }

  /* Otherwise, return INIT_VAL(REG).  */
  return m_mgr->get_or_create_initial_value (reg);
}
1275
808f4dfe
DM
/* Get a value for REG, looking it up in the store, or otherwise falling
   back to "initial" or "unknown" values.
   Handles several special cases (constant-pool decls, reads within
   string literals) before consulting the store.  */

const svalue *
region_model::get_store_value (const region *reg) const
{
  /* Special-case: handle var_decls in the constant pool.  */
  if (const decl_region *decl_reg = reg->dyn_cast_decl_region ())
    if (const svalue *sval = decl_reg->maybe_get_constant_value (m_mgr))
      return sval;

  const svalue *sval
    = m_store.get_any_binding (m_mgr->get_store_manager (), reg);
  if (sval)
    {
      /* Cast the store's binding to REG's type, if it has one.  */
      if (reg->get_type ())
	sval = m_mgr->get_or_create_cast (reg->get_type (), sval);
      return sval;
    }

  /* Special-case: read at a constant index within a STRING_CST.  */
  if (const offset_region *offset_reg = reg->dyn_cast_offset_region ())
    if (tree byte_offset_cst
	  = offset_reg->get_byte_offset ()->maybe_get_constant ())
      if (const string_region *str_reg
	    = reg->get_parent_region ()->dyn_cast_string_region ())
	{
	  tree string_cst = str_reg->get_string_cst ();
	  if (const svalue *char_sval
		= m_mgr->maybe_get_char_from_string_cst (string_cst,
							 byte_offset_cst))
	    return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
	}

  /* Special-case: read the initial char of a STRING_CST.  */
  if (const cast_region *cast_reg = reg->dyn_cast_cast_region ())
    if (const string_region *str_reg
	  = cast_reg->get_original_region ()->dyn_cast_string_region ())
      {
	tree string_cst = str_reg->get_string_cst ();
	tree byte_offset_cst = build_int_cst (integer_type_node, 0);
	if (const svalue *char_sval
	      = m_mgr->maybe_get_char_from_string_cst (string_cst,
						       byte_offset_cst))
	  return m_mgr->get_or_create_cast (reg->get_type (), char_sval);
      }

  /* Otherwise we implicitly have the initial value of the region
     (if the cluster had been touched, binding_cluster::get_any_binding,
     would have returned UNKNOWN, and we would already have returned
     that above).  */

  /* Handle globals.  */
  if (reg->get_base_region ()->get_parent_region ()->get_kind ()
      == RK_GLOBALS)
    return get_initial_value_for_global (reg);

  return m_mgr->get_or_create_initial_value (reg);
}
1335
808f4dfe
DM
/* Return false if REG does not exist, true if it may do.
   This is for detecting regions within the stack that don't exist anymore
   after frames are popped.  */

bool
region_model::region_exists_p (const region *reg) const
{
  /* If within a stack frame, check that the stack frame is live.  */
  if (const frame_region *enclosing_frame = reg->maybe_get_frame_region ())
    {
      /* Check that the current frame is the enclosing frame, or is called
	 by it.  */
      for (const frame_region *iter_frame = get_current_frame (); iter_frame;
	   iter_frame = iter_frame->get_calling_frame ())
	if (iter_frame == enclosing_frame)
	  return true;
      return false;
    }

  /* Non-stack regions are assumed to exist.  */
  return true;
}
1357
808f4dfe
DM
1358/* Get a region for referencing PTR_SVAL, creating a region if need be, and
1359 potentially generating warnings via CTXT.
1360 PTR_TREE if non-NULL can be used when emitting diagnostics. */
757bf1df 1361
808f4dfe
DM
1362const region *
1363region_model::deref_rvalue (const svalue *ptr_sval, tree ptr_tree,
1364 region_model_context *ctxt)
757bf1df 1365{
808f4dfe 1366 gcc_assert (ptr_sval);
757bf1df 1367
808f4dfe 1368 switch (ptr_sval->get_kind ())
757bf1df 1369 {
808f4dfe 1370 default:
23ebfda0 1371 break;
808f4dfe 1372
757bf1df
DM
1373 case SK_REGION:
1374 {
808f4dfe
DM
1375 const region_svalue *region_sval
1376 = as_a <const region_svalue *> (ptr_sval);
757bf1df
DM
1377 return region_sval->get_pointee ();
1378 }
1379
808f4dfe
DM
1380 case SK_BINOP:
1381 {
1382 const binop_svalue *binop_sval
1383 = as_a <const binop_svalue *> (ptr_sval);
1384 switch (binop_sval->get_op ())
1385 {
1386 case POINTER_PLUS_EXPR:
1387 {
1388 /* If we have a symbolic value expressing pointer arithmentic,
1389 try to convert it to a suitable region. */
1390 const region *parent_region
1391 = deref_rvalue (binop_sval->get_arg0 (), NULL_TREE, ctxt);
1392 const svalue *offset = binop_sval->get_arg1 ();
1393 tree type= TREE_TYPE (ptr_sval->get_type ());
1394 return m_mgr->get_offset_region (parent_region, type, offset);
1395 }
1396 default:
23ebfda0 1397 break;
808f4dfe
DM
1398 }
1399 }
23ebfda0 1400 break;
757bf1df
DM
1401
1402 case SK_POISONED:
1403 {
1404 if (ctxt)
808f4dfe
DM
1405 {
1406 tree ptr = get_representative_tree (ptr_sval);
1407 /* If we can't get a representative tree for PTR_SVAL
1408 (e.g. if it hasn't been bound into the store), then
1409 fall back on PTR_TREE, if non-NULL. */
1410 if (!ptr)
1411 ptr = ptr_tree;
1412 if (ptr)
1413 {
1414 const poisoned_svalue *poisoned_sval
1415 = as_a <const poisoned_svalue *> (ptr_sval);
1416 enum poison_kind pkind = poisoned_sval->get_poison_kind ();
1417 ctxt->warn (new poisoned_value_diagnostic (ptr, pkind));
1418 }
1419 }
757bf1df 1420 }
23ebfda0 1421 break;
757bf1df
DM
1422 }
1423
23ebfda0 1424 return m_mgr->get_symbolic_region (ptr_sval);
757bf1df
DM
1425}
1426
808f4dfe
DM
/* Set the value of the region given by LHS_REG to the value given
   by RHS_SVAL.
   The context parameter is currently unused; the binding is made
   directly (BK_direct) in the store.  */

void
region_model::set_value (const region *lhs_reg, const svalue *rhs_sval,
			 region_model_context */*ctxt*/)
{
  gcc_assert (lhs_reg);
  gcc_assert (rhs_sval);

  m_store.set_value (m_mgr->get_store_manager(), lhs_reg, rhs_sval,
		     BK_direct);
}
1440
808f4dfe 1441/* Set the value of the region given by LHS to the value given by RHS. */
757bf1df
DM
1442
1443void
808f4dfe 1444region_model::set_value (tree lhs, tree rhs, region_model_context *ctxt)
757bf1df 1445{
808f4dfe
DM
1446 const region *lhs_reg = get_lvalue (lhs, ctxt);
1447 const svalue *rhs_sval = get_rvalue (rhs, ctxt);
1448 gcc_assert (lhs_reg);
1449 gcc_assert (rhs_sval);
1450 set_value (lhs_reg, rhs_sval, ctxt);
757bf1df
DM
1451}
1452
/* Remove all bindings overlapping REG within the store.  */

void
region_model::clobber_region (const region *reg)
{
  m_store.clobber_region (m_mgr->get_store_manager(), reg);
}
1460
/* Remove any bindings for REG within the store.  */

void
region_model::purge_region (const region *reg)
{
  m_store.purge_region (m_mgr->get_store_manager(), reg);
}
1468
/* Zero-fill REG.  */

void
region_model::zero_fill_region (const region *reg)
{
  m_store.zero_fill_region (m_mgr->get_store_manager(), reg);
}
1476
/* Mark REG as having unknown content.  */

void
region_model::mark_region_as_unknown (const region *reg)
{
  m_store.mark_region_as_unknown (m_mgr->get_store_manager(), reg);
}
1484
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model.
   Tries value-based reasoning first, then falls back on the
   constraint_manager.  */

tristate
region_model::eval_condition (const svalue *lhs,
			      enum tree_code op,
			      const svalue *rhs) const
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  tristate ts = eval_condition_without_cm (lhs, op, rhs);
  if (ts.is_known ())
    return ts;

  /* Otherwise, try constraints.  */
  return m_constraints->eval_condition (lhs, op, rhs);
}
1506
/* Determine what is known about the condition "LHS_SVAL OP RHS_SVAL" within
   this model, without resorting to the constraint_manager.

   This is exposed so that impl_region_model_context::on_state_leak can
   check for equality part-way through region_model::purge_unused_svalues
   without risking creating new ECs.  */

tristate
region_model::eval_condition_without_cm (const svalue *lhs,
					 enum tree_code op,
					 const svalue *rhs) const
{
  gcc_assert (lhs);
  gcc_assert (rhs);

  /* See what we know based on the values.  */

  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if ((lhs->get_type () && FLOAT_TYPE_P (lhs->get_type ()))
      || (rhs->get_type () && FLOAT_TYPE_P (rhs->get_type ())))
    return tristate::unknown ();

  /* Unwrap any unmergeable values.  */
  lhs = lhs->unwrap_any_unmergeable ();
  rhs = rhs->unwrap_any_unmergeable ();

  if (lhs == rhs)
    {
      /* If we have the same svalue, then we have equality
	 (apart from NaN-handling).
	 TODO: should this definitely be the case for poisoned values?  */
      /* Poisoned and unknown values are "unknowable".  */
      if (lhs->get_kind () == SK_POISONED
	  || lhs->get_kind () == SK_UNKNOWN)
	return tristate::TS_UNKNOWN;

      switch (op)
	{
	case EQ_EXPR:
	case GE_EXPR:
	case LE_EXPR:
	  return tristate::TS_TRUE;

	case NE_EXPR:
	case GT_EXPR:
	case LT_EXPR:
	  return tristate::TS_FALSE;

	default:
	  /* For other ops, use the logic below.  */
	  break;
	}
    }

  /* If we have a pair of region_svalues, compare them.  */
  if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = region_svalue::eval_condition (lhs_ptr, op, rhs_ptr);
	if (res.is_known ())
	  return res;
	/* Otherwise, only known through constraints.  */
      }

  /* If we have a pair of constants, compare them.  */
  if (const constant_svalue *cst_lhs = lhs->dyn_cast_constant_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      return constant_svalue::eval_condition (cst_lhs, op, cst_rhs);

  /* Handle comparison of a region_svalue against zero.  */

  if (const region_svalue *ptr = lhs->dyn_cast_region_svalue ())
    if (const constant_svalue *cst_rhs = rhs->dyn_cast_constant_svalue ())
      if (zerop (cst_rhs->get_constant ()))
	{
	  /* A region_svalue is a non-NULL pointer, except in certain
	     special cases (see the comment for region::non_null_p).  */
	  const region *pointee = ptr->get_pointee ();
	  if (pointee->non_null_p ())
	    {
	      switch (op)
		{
		default:
		  gcc_unreachable ();

		case EQ_EXPR:
		case GE_EXPR:
		case LE_EXPR:
		  return tristate::TS_FALSE;

		case NE_EXPR:
		case GT_EXPR:
		case LT_EXPR:
		  return tristate::TS_TRUE;
		}
	    }
	}

  /* Handle rejection of equality for comparisons of the initial values of
     "external" values (such as params) with the address of locals.  */
  if (const initial_svalue *init_lhs = lhs->dyn_cast_initial_svalue ())
    if (const region_svalue *rhs_ptr = rhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_lhs, rhs_ptr);
	if (res.is_known ())
	  return res;
      }
  if (const initial_svalue *init_rhs = rhs->dyn_cast_initial_svalue ())
    if (const region_svalue *lhs_ptr = lhs->dyn_cast_region_svalue ())
      {
	tristate res = compare_initial_and_pointer (init_rhs, lhs_ptr);
	if (res.is_known ())
	  return res;
      }

  /* A widened value compared against a constant may be decidable.  */
  if (const widening_svalue *widen_lhs = lhs->dyn_cast_widening_svalue ())
    if (tree rhs_cst = rhs->maybe_get_constant ())
      {
	tristate res = widen_lhs->eval_condition_without_cm (op, rhs_cst);
	if (res.is_known ())
	  return res;
      }

  return tristate::TS_UNKNOWN;
}
1633
/* Subroutine of region_model::eval_condition_without_cm, for rejecting
   equality of INIT_VAL(PARM) with &LOCAL.
   Returns TS_FALSE when INIT is the initial value of a parameter and PTR
   points within a stack frame; TS_UNKNOWN otherwise.  */

tristate
region_model::compare_initial_and_pointer (const initial_svalue *init,
					   const region_svalue *ptr) const
{
  const region *pointee = ptr->get_pointee ();

  /* If we have a pointer to something within a stack frame, it can't be the
     initial value of a param.  */
  if (pointee->maybe_get_frame_region ())
    {
      const region *reg = init->get_region ();
      if (tree reg_decl = reg->maybe_get_decl ())
	if (TREE_CODE (reg_decl) == SSA_NAME)
	  {
	    tree ssa_name = reg_decl;
	    /* The default-def SSA name of a PARM_DECL is the param's
	       incoming value.  */
	    if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
		&& SSA_NAME_VAR (ssa_name)
		&& TREE_CODE (SSA_NAME_VAR (ssa_name)) == PARM_DECL)
	      return tristate::TS_FALSE;
	  }
    }

  return tristate::TS_UNKNOWN;
}
1661
/* Attempt to add the constraint "LHS OP RHS" to this region_model.
   If it is consistent with existing constraints, add it, and return true.
   Return false if it contradicts existing constraints.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

bool
region_model::add_constraint (tree lhs, enum tree_code op, tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to capture constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return true;

  const svalue *lhs_sval = get_rvalue (lhs, ctxt);
  const svalue *rhs_sval = get_rvalue (rhs, ctxt);

  tristate t_cond = eval_condition (lhs_sval, op, rhs_sval);

  /* If we already have the condition, do nothing.  */
  if (t_cond.is_true ())
    return true;

  /* Reject a constraint that would contradict existing knowledge, as
     unsatisfiable.  */
  if (t_cond.is_false ())
    return false;

  /* Store the constraint.  */
  m_constraints->add_constraint (lhs_sval, op, rhs_sval);

  /* Derive any further constraints implied by LHS's def-stmt.  */
  add_any_constraints_from_ssa_def_stmt (lhs, op, rhs, ctxt);

  /* Notify the context, if any.  This exists so that the state machines
     in a program_state can be notified about the condition, and so can
     set sm-state for e.g. unchecked->checked, both for cfg-edges, and
     when synthesizing constraints as above.  */
  if (ctxt)
    ctxt->on_condition (lhs, op, rhs);

  return true;
}
1704
/* Subroutine of region_model::add_constraint for handling optimized
   && and || conditionals.

   If we have an SSA_NAME for a boolean compared against 0,
   look at anything implied by the def stmt and call add_constraint
   for it (which could recurse).

   For example, if we have
      _1 = p_6 == 0B;
      _2 = p_8 == 0B
      _3 = _1 | _2
    and add the constraint
      (_3 == 0),
    then the def stmt for _3 implies that _1 and _2 are both false,
    and hence we can add the constraints:
      p_6 != 0B
      p_8 != 0B.  */

void
region_model::add_any_constraints_from_ssa_def_stmt (tree lhs,
						     enum tree_code op,
						     tree rhs,
						     region_model_context *ctxt)
{
  /* Only applicable to "SSA_NAME (in)equality-compared against zero".  */
  if (TREE_CODE (lhs) != SSA_NAME)
    return;

  if (!zerop (rhs))
    return;

  if (op != NE_EXPR && op != EQ_EXPR)
    return;

  gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
  if (const gassign *assign = dyn_cast<gassign *> (def_stmt))
    add_any_constraints_from_gassign (op, rhs, assign, ctxt);
  else if (gcall *call = dyn_cast<gcall *> (def_stmt))
    add_any_constraints_from_gcall (op, rhs, call, ctxt);
}
1744
/* Add any constraints for an SSA_NAME defined by ASSIGN
   where the result OP RHS.
   Handles NOP_EXPR, BIT_AND_EXPR, BIT_IOR_EXPR and (in)equality RHS
   codes; other codes are ignored.  */

void
region_model::add_any_constraints_from_gassign (enum tree_code op,
						tree rhs,
						const gassign *assign,
						region_model_context *ctxt)
{
  /* We have either
     - "LHS != false" (i.e. LHS is true), or
     - "LHS == false" (i.e. LHS is false).  */
  bool is_true = op == NE_EXPR;

  enum tree_code rhs_code = gimple_assign_rhs_code (assign);

  switch (rhs_code)
    {
    default:
      break;

    case NOP_EXPR:
      {
	/* A conversion: constrain the converted operand directly.  */
	add_constraint (gimple_assign_rhs1 (assign), op, rhs, ctxt);
      }
      break;

    case BIT_AND_EXPR:
      {
	if (is_true)
	  {
	    /* ...and "LHS == (rhs1 & rhs2) i.e. "(rhs1 & rhs2)" is true
	       then both rhs1 and rhs2 must be true.  */
	    tree rhs1 = gimple_assign_rhs1 (assign);
	    tree rhs2 = gimple_assign_rhs2 (assign);
	    add_constraint (rhs1, NE_EXPR, boolean_false_node, ctxt);
	    add_constraint (rhs2, NE_EXPR, boolean_false_node, ctxt);
	  }
      }
      break;

    case BIT_IOR_EXPR:
      {
	if (!is_true)
	  {
	    /* ...and "LHS == (rhs1 | rhs2)
	       i.e. "(rhs1 | rhs2)" is false
	       then both rhs1 and rhs2 must be false.  */
	    tree rhs1 = gimple_assign_rhs1 (assign);
	    tree rhs2 = gimple_assign_rhs2 (assign);
	    add_constraint (rhs1, EQ_EXPR, boolean_false_node, ctxt);
	    add_constraint (rhs2, EQ_EXPR, boolean_false_node, ctxt);
	  }
      }
      break;

    case EQ_EXPR:
    case NE_EXPR:
      {
	/* ...and "LHS == (rhs1 OP rhs2)"
	   then rhs1 OP rhs2 must have the same logical value as LHS.  */
	tree rhs1 = gimple_assign_rhs1 (assign);
	tree rhs2 = gimple_assign_rhs2 (assign);
	if (!is_true)
	  rhs_code
	    = invert_tree_comparison (rhs_code, false /* honor_nans */);
	add_constraint (rhs1, rhs_code, rhs2, ctxt);
      }
      break;
    }
}
1816
e516294a
DM
/* Add any constraints for an SSA_NAME defined by CALL
   where the result OP RHS.
   Currently only handles the __builtin_expect family, whose return
   value is its first argument.  */

void
region_model::add_any_constraints_from_gcall (enum tree_code op,
					      tree rhs,
					      const gcall *call,
					      region_model_context *ctxt)
{
  if (gimple_call_builtin_p (call, BUILT_IN_EXPECT)
      || gimple_call_builtin_p (call, BUILT_IN_EXPECT_WITH_PROBABILITY)
      || gimple_call_internal_p (call, IFN_BUILTIN_EXPECT))
    {
      /* __builtin_expect's return value is its initial argument.  */
      add_constraint (gimple_call_arg (call, 0), op, rhs, ctxt);
    }
}
1834
757bf1df
DM
/* Determine what is known about the condition "LHS OP RHS" within
   this model.
   Use CTXT for reporting any diagnostics associated with the accesses.  */

tristate
region_model::eval_condition (tree lhs,
			      enum tree_code op,
			      tree rhs,
			      region_model_context *ctxt)
{
  /* For now, make no attempt to model constraints on floating-point
     values.  */
  if (FLOAT_TYPE_P (TREE_TYPE (lhs)) || FLOAT_TYPE_P (TREE_TYPE (rhs)))
    return tristate::unknown ();

  return eval_condition (get_rvalue (lhs, ctxt), op, get_rvalue (rhs, ctxt));
}
1852
808f4dfe
DM
/* Attempt to return a path_var that represents SVAL, or return NULL_TREE.
   Use VISITED to prevent infinite mutual recursion with the overload for
   regions.
   When several representations exist, the most readable one (per
   readability_comparator) is returned.  */

path_var
region_model::get_representative_path_var (const svalue *sval,
					   svalue_set *visited) const
{
  if (sval == NULL)
    return path_var (NULL_TREE, 0);

  /* Look through casts to the underlying value.  */
  if (const svalue *cast_sval = sval->maybe_undo_cast ())
    sval = cast_sval;

  /* Prevent infinite recursion.  */
  if (visited->contains (sval))
    return path_var (NULL_TREE, 0);
  visited->add (sval);

  auto_vec<path_var> pvs;
  m_store.get_representative_path_vars (this, visited, sval, &pvs);

  if (tree cst = sval->maybe_get_constant ())
    pvs.safe_push (path_var (cst, 0));

  /* Handle string literals and various other pointers.  */
  if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
    {
      const region *reg = ptr_sval->get_pointee ();
      if (path_var pv = get_representative_path_var (reg, visited))
	return path_var (build1 (ADDR_EXPR,
				 TREE_TYPE (sval->get_type ()),
				 pv.m_tree),
			 pv.m_stack_depth);
    }

  /* If we have a sub_svalue, look for ways to represent the parent.  */
  if (const sub_svalue *sub_sval = sval->dyn_cast_sub_svalue ())
    {
      const svalue *parent_sval = sub_sval->get_parent ();
      const region *subreg = sub_sval->get_subregion ();
      if (path_var parent_pv
	    = get_representative_path_var (parent_sval, visited))
	if (const field_region *field_reg = subreg->dyn_cast_field_region ())
	  return path_var (build3 (COMPONENT_REF,
				   sval->get_type (),
				   parent_pv.m_tree,
				   field_reg->get_field (),
				   NULL_TREE),
			   parent_pv.m_stack_depth);
    }

  if (pvs.length () < 1)
    return path_var (NULL_TREE, 0);

  /* Prefer the most readable representation.  */
  pvs.qsort (readability_comparator);
  return pvs[0];
}
1911
808f4dfe 1912/* Attempt to return a tree that represents SVAL, or return NULL_TREE. */
757bf1df 1913
808f4dfe
DM
1914tree
1915region_model::get_representative_tree (const svalue *sval) const
757bf1df 1916{
808f4dfe
DM
1917 svalue_set visited;
1918 return get_representative_path_var (sval, &visited).m_tree;
1919}
1920
1921/* Attempt to return a path_var that represents REG, or return
1922 the NULL path_var.
1923 For example, a region for a field of a local would be a path_var
1924 wrapping a COMPONENT_REF.
1925 Use VISITED to prevent infinite mutual recursion with the overload for
1926 svalues. */
757bf1df 1927
808f4dfe
DM
1928path_var
1929region_model::get_representative_path_var (const region *reg,
1930 svalue_set *visited) const
1931{
1932 switch (reg->get_kind ())
757bf1df 1933 {
808f4dfe
DM
1934 default:
1935 gcc_unreachable ();
e516294a 1936
808f4dfe
DM
1937 case RK_FRAME:
1938 case RK_GLOBALS:
1939 case RK_CODE:
1940 case RK_HEAP:
1941 case RK_STACK:
1942 case RK_ROOT:
1943 /* Regions that represent memory spaces are not expressible as trees. */
1944 return path_var (NULL_TREE, 0);
757bf1df 1945
808f4dfe 1946 case RK_FUNCTION:
884d9141 1947 {
808f4dfe
DM
1948 const function_region *function_reg
1949 = as_a <const function_region *> (reg);
1950 return path_var (function_reg->get_fndecl (), 0);
884d9141 1951 }
808f4dfe
DM
1952 case RK_LABEL:
1953 gcc_unreachable (); // TODO
90f7c300 1954
808f4dfe
DM
1955 case RK_SYMBOLIC:
1956 {
1957 const symbolic_region *symbolic_reg
1958 = as_a <const symbolic_region *> (reg);
1959 const svalue *pointer = symbolic_reg->get_pointer ();
1960 path_var pointer_pv = get_representative_path_var (pointer, visited);
1961 if (!pointer_pv)
1962 return path_var (NULL_TREE, 0);
1963 tree offset = build_int_cst (pointer->get_type (), 0);
1964 return path_var (build2 (MEM_REF,
1965 reg->get_type (),
1966 pointer_pv.m_tree,
1967 offset),
1968 pointer_pv.m_stack_depth);
1969 }
1970 case RK_DECL:
1971 {
1972 const decl_region *decl_reg = as_a <const decl_region *> (reg);
1973 return path_var (decl_reg->get_decl (), decl_reg->get_stack_depth ());
1974 }
1975 case RK_FIELD:
1976 {
1977 const field_region *field_reg = as_a <const field_region *> (reg);
1978 path_var parent_pv
1979 = get_representative_path_var (reg->get_parent_region (), visited);
1980 if (!parent_pv)
1981 return path_var (NULL_TREE, 0);
1982 return path_var (build3 (COMPONENT_REF,
1983 reg->get_type (),
1984 parent_pv.m_tree,
1985 field_reg->get_field (),
1986 NULL_TREE),
1987 parent_pv.m_stack_depth);
1988 }
757bf1df 1989
808f4dfe
DM
1990 case RK_ELEMENT:
1991 {
1992 const element_region *element_reg
1993 = as_a <const element_region *> (reg);
1994 path_var parent_pv
1995 = get_representative_path_var (reg->get_parent_region (), visited);
1996 if (!parent_pv)
1997 return path_var (NULL_TREE, 0);
1998 path_var index_pv
1999 = get_representative_path_var (element_reg->get_index (), visited);
2000 if (!index_pv)
2001 return path_var (NULL_TREE, 0);
2002 return path_var (build4 (ARRAY_REF,
2003 reg->get_type (),
2004 parent_pv.m_tree, index_pv.m_tree,
2005 NULL_TREE, NULL_TREE),
2006 parent_pv.m_stack_depth);
2007 }
757bf1df 2008
808f4dfe 2009 case RK_OFFSET:
757bf1df 2010 {
808f4dfe
DM
2011 const offset_region *offset_reg
2012 = as_a <const offset_region *> (reg);
2013 path_var parent_pv
2014 = get_representative_path_var (reg->get_parent_region (), visited);
2015 if (!parent_pv)
2016 return path_var (NULL_TREE, 0);
2017 path_var offset_pv
2018 = get_representative_path_var (offset_reg->get_byte_offset (),
2019 visited);
2020 if (!offset_pv)
2021 return path_var (NULL_TREE, 0);
2022 return path_var (build2 (MEM_REF,
2023 reg->get_type (),
2024 parent_pv.m_tree, offset_pv.m_tree),
2025 parent_pv.m_stack_depth);
757bf1df 2026 }
757bf1df 2027
808f4dfe
DM
2028 case RK_CAST:
2029 {
2030 path_var parent_pv
2031 = get_representative_path_var (reg->get_parent_region (), visited);
2032 if (!parent_pv)
2033 return path_var (NULL_TREE, 0);
2034 return path_var (build1 (NOP_EXPR,
2035 reg->get_type (),
2036 parent_pv.m_tree),
2037 parent_pv.m_stack_depth);
2038 }
757bf1df 2039
808f4dfe
DM
2040 case RK_HEAP_ALLOCATED:
2041 case RK_ALLOCA:
2042 /* No good way to express heap-allocated/alloca regions as trees. */
2043 return path_var (NULL_TREE, 0);
757bf1df 2044
808f4dfe
DM
2045 case RK_STRING:
2046 {
2047 const string_region *string_reg = as_a <const string_region *> (reg);
2048 return path_var (string_reg->get_string_cst (), 0);
2049 }
757bf1df 2050
808f4dfe
DM
2051 case RK_UNKNOWN:
2052 return path_var (NULL_TREE, 0);
2053 }
757bf1df
DM
2054}
2055
2056/* Update this model for any phis in SNODE, assuming we came from
2057 LAST_CFG_SUPEREDGE. */
2058
2059void
2060region_model::update_for_phis (const supernode *snode,
2061 const cfg_superedge *last_cfg_superedge,
2062 region_model_context *ctxt)
2063{
2064 gcc_assert (last_cfg_superedge);
2065
2066 for (gphi_iterator gpi = const_cast<supernode *>(snode)->start_phis ();
2067 !gsi_end_p (gpi); gsi_next (&gpi))
2068 {
2069 gphi *phi = gpi.phi ();
2070
2071 tree src = last_cfg_superedge->get_phi_arg (phi);
2072 tree lhs = gimple_phi_result (phi);
2073
2074 /* Update next_state based on phi. */
808f4dfe 2075 handle_phi (phi, lhs, src, ctxt);
757bf1df
DM
2076 }
2077}
2078
2079/* Attempt to update this model for taking EDGE (where the last statement
2080 was LAST_STMT), returning true if the edge can be taken, false
2081 otherwise.
2082
2083 For CFG superedges where LAST_STMT is a conditional or a switch
2084 statement, attempt to add the relevant conditions for EDGE to this
2085 model, returning true if they are feasible, or false if they are
2086 impossible.
2087
2088 For call superedges, push frame information and store arguments
2089 into parameters.
2090
2091 For return superedges, pop frame information and store return
2092 values into any lhs.
2093
2094 Rejection of call/return superedges happens elsewhere, in
2095 program_point::on_edge (i.e. based on program point, rather
2096 than program state). */
2097
2098bool
2099region_model::maybe_update_for_edge (const superedge &edge,
2100 const gimple *last_stmt,
2101 region_model_context *ctxt)
2102{
2103 /* Handle frame updates for interprocedural edges. */
2104 switch (edge.m_kind)
2105 {
2106 default:
2107 break;
2108
2109 case SUPEREDGE_CALL:
2110 {
2111 const call_superedge *call_edge = as_a <const call_superedge *> (&edge);
2112 update_for_call_superedge (*call_edge, ctxt);
2113 }
2114 break;
2115
2116 case SUPEREDGE_RETURN:
2117 {
2118 const return_superedge *return_edge
2119 = as_a <const return_superedge *> (&edge);
2120 update_for_return_superedge (*return_edge, ctxt);
2121 }
2122 break;
2123
2124 case SUPEREDGE_INTRAPROCEDURAL_CALL:
2125 {
2126 const callgraph_superedge *cg_sedge
2127 = as_a <const callgraph_superedge *> (&edge);
2128 update_for_call_summary (*cg_sedge, ctxt);
2129 }
2130 break;
2131 }
2132
2133 if (last_stmt == NULL)
2134 return true;
2135
2136 /* Apply any constraints for conditionals/switch statements. */
2137
2138 if (const gcond *cond_stmt = dyn_cast <const gcond *> (last_stmt))
2139 {
2140 const cfg_superedge *cfg_sedge = as_a <const cfg_superedge *> (&edge);
2141 return apply_constraints_for_gcond (*cfg_sedge, cond_stmt, ctxt);
2142 }
2143
2144 if (const gswitch *switch_stmt = dyn_cast <const gswitch *> (last_stmt))
2145 {
2146 const switch_cfg_superedge *switch_sedge
2147 = as_a <const switch_cfg_superedge *> (&edge);
2148 return apply_constraints_for_gswitch (*switch_sedge, switch_stmt, ctxt);
2149 }
2150
2151 return true;
2152}
2153
2154/* Push a new frame_region on to the stack region.
2155 Populate the frame_region with child regions for the function call's
2156 parameters, using values from the arguments at the callsite in the
2157 caller's frame. */
2158
2159void
2160region_model::update_for_call_superedge (const call_superedge &call_edge,
2161 region_model_context *ctxt)
2162{
808f4dfe 2163 /* Build a vec of argument svalues, using the current top
757bf1df
DM
2164 frame for resolving tree expressions. */
2165 const gcall *call_stmt = call_edge.get_call_stmt ();
808f4dfe 2166 auto_vec<const svalue *> arg_svals (gimple_call_num_args (call_stmt));
757bf1df
DM
2167
2168 for (unsigned i = 0; i < gimple_call_num_args (call_stmt); i++)
2169 {
2170 tree arg = gimple_call_arg (call_stmt, i);
808f4dfe 2171 arg_svals.quick_push (get_rvalue (arg, ctxt));
757bf1df
DM
2172 }
2173
808f4dfe 2174 push_frame (call_edge.get_callee_function (), &arg_svals, ctxt);
757bf1df
DM
2175}
2176
a96f1c38
DM
2177/* Pop the top-most frame_region from the stack, and copy the return
2178 region's values (if any) into the region for the lvalue of the LHS of
757bf1df 2179 the call (if any). */
757bf1df
DM
2180void
2181region_model::update_for_return_superedge (const return_superedge &return_edge,
2182 region_model_context *ctxt)
2183{
a96f1c38 2184 /* Get the region for the result of the call, within the caller frame. */
808f4dfe 2185 const region *result_dst_reg = NULL;
757bf1df
DM
2186 const gcall *call_stmt = return_edge.get_call_stmt ();
2187 tree lhs = gimple_call_lhs (call_stmt);
2188 if (lhs)
a96f1c38
DM
2189 {
2190 /* Normally we access the top-level frame, which is:
808f4dfe 2191 path_var (expr, get_stack_depth () - 1)
a96f1c38 2192 whereas here we need the caller frame, hence "- 2" here. */
808f4dfe
DM
2193 gcc_assert (get_stack_depth () >= 2);
2194 result_dst_reg = get_lvalue (path_var (lhs, get_stack_depth () - 2),
a96f1c38
DM
2195 ctxt);
2196 }
2197
808f4dfe 2198 pop_frame (result_dst_reg, NULL, ctxt);
757bf1df
DM
2199}
2200
2201/* Update this region_model with a summary of the effect of calling
2202 and returning from CG_SEDGE.
2203
2204 TODO: Currently this is extremely simplistic: we merely set the
2205 return value to "unknown". A proper implementation would e.g. update
2206 sm-state, and presumably be reworked to support multiple outcomes. */
2207
2208void
2209region_model::update_for_call_summary (const callgraph_superedge &cg_sedge,
2210 region_model_context *ctxt)
2211{
2212 /* For now, set any return value to "unknown". */
2213 const gcall *call_stmt = cg_sedge.get_call_stmt ();
2214 tree lhs = gimple_call_lhs (call_stmt);
2215 if (lhs)
808f4dfe 2216 mark_region_as_unknown (get_lvalue (lhs, ctxt));
757bf1df
DM
2217
2218 // TODO: actually implement some kind of summary here
2219}
2220
2221/* Given a true or false edge guarded by conditional statement COND_STMT,
2222 determine appropriate constraints for the edge to be taken.
2223
2224 If they are feasible, add the constraints and return true.
2225
2226 Return false if the constraints contradict existing knowledge
2227 (and so the edge should not be taken). */
2228
2229bool
2230region_model::apply_constraints_for_gcond (const cfg_superedge &sedge,
2231 const gcond *cond_stmt,
2232 region_model_context *ctxt)
2233{
2234 ::edge cfg_edge = sedge.get_cfg_edge ();
2235 gcc_assert (cfg_edge != NULL);
2236 gcc_assert (cfg_edge->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE));
2237
2238 enum tree_code op = gimple_cond_code (cond_stmt);
2239 tree lhs = gimple_cond_lhs (cond_stmt);
2240 tree rhs = gimple_cond_rhs (cond_stmt);
2241 if (cfg_edge->flags & EDGE_FALSE_VALUE)
2242 op = invert_tree_comparison (op, false /* honor_nans */);
2243 return add_constraint (lhs, op, rhs, ctxt);
2244}
2245
2246/* Given an EDGE guarded by SWITCH_STMT, determine appropriate constraints
2247 for the edge to be taken.
2248
2249 If they are feasible, add the constraints and return true.
2250
2251 Return false if the constraints contradict existing knowledge
2252 (and so the edge should not be taken). */
2253
2254bool
2255region_model::apply_constraints_for_gswitch (const switch_cfg_superedge &edge,
2256 const gswitch *switch_stmt,
2257 region_model_context *ctxt)
2258{
2259 tree index = gimple_switch_index (switch_stmt);
2260 tree case_label = edge.get_case_label ();
2261 gcc_assert (TREE_CODE (case_label) == CASE_LABEL_EXPR);
2262 tree lower_bound = CASE_LOW (case_label);
2263 tree upper_bound = CASE_HIGH (case_label);
2264 if (lower_bound)
2265 {
2266 if (upper_bound)
2267 {
2268 /* Range. */
2269 if (!add_constraint (index, GE_EXPR, lower_bound, ctxt))
2270 return false;
2271 return add_constraint (index, LE_EXPR, upper_bound, ctxt);
2272 }
2273 else
2274 /* Single-value. */
2275 return add_constraint (index, EQ_EXPR, lower_bound, ctxt);
2276 }
2277 else
2278 {
2279 /* The default case.
2280 Add exclusions based on the other cases. */
2281 for (unsigned other_idx = 1;
2282 other_idx < gimple_switch_num_labels (switch_stmt);
2283 other_idx++)
2284 {
2285 tree other_label = gimple_switch_label (switch_stmt,
2286 other_idx);
2287 tree other_lower_bound = CASE_LOW (other_label);
2288 tree other_upper_bound = CASE_HIGH (other_label);
2289 gcc_assert (other_lower_bound);
2290 if (other_upper_bound)
2291 {
2292 /* Exclude this range-valued case.
2293 For now, we just exclude the boundary values.
2294 TODO: exclude the values within the region. */
2295 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
2296 return false;
2297 if (!add_constraint (index, NE_EXPR, other_upper_bound, ctxt))
2298 return false;
2299 }
2300 else
2301 /* Exclude this single-valued case. */
2302 if (!add_constraint (index, NE_EXPR, other_lower_bound, ctxt))
2303 return false;
2304 }
2305 return true;
2306 }
2307}
2308
808f4dfe
DM
2309/* For use with push_frame when handling a top-level call within the analysis.
2310 PARAM has a defined but unknown initial value.
2311 Anything it points to has escaped, since the calling context "knows"
2312 the pointer, and thus calls to unknown functions could read/write into
2313 the region. */
757bf1df
DM
2314
2315void
808f4dfe 2316region_model::on_top_level_param (tree param,
3a25f345 2317 region_model_context *ctxt)
757bf1df 2318{
808f4dfe 2319 if (POINTER_TYPE_P (TREE_TYPE (param)))
5eae0ac7 2320 {
808f4dfe
DM
2321 const region *param_reg = get_lvalue (param, ctxt);
2322 const svalue *init_ptr_sval
2323 = m_mgr->get_or_create_initial_value (param_reg);
2324 const region *pointee_reg = m_mgr->get_symbolic_region (init_ptr_sval);
2325 m_store.mark_as_escaped (pointee_reg);
5eae0ac7 2326 }
757bf1df
DM
2327}
2328
808f4dfe
DM
2329/* Update this region_model to reflect pushing a frame onto the stack
2330 for a call to FUN.
757bf1df 2331
808f4dfe
DM
2332 If ARG_SVALS is non-NULL, use it to populate the parameters
2333 in the new frame.
2334 Otherwise, the params have their initial_svalues.
757bf1df 2335
808f4dfe 2336 Return the frame_region for the new frame. */
757bf1df 2337
808f4dfe
DM
2338const region *
2339region_model::push_frame (function *fun, const vec<const svalue *> *arg_svals,
2340 region_model_context *ctxt)
757bf1df 2341{
808f4dfe
DM
2342 m_current_frame = m_mgr->get_frame_region (m_current_frame, fun);
2343 if (arg_svals)
757bf1df 2344 {
808f4dfe
DM
2345 /* Arguments supplied from a caller frame. */
2346 tree fndecl = fun->decl;
2347 unsigned idx = 0;
2348 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2349 iter_parm = DECL_CHAIN (iter_parm), ++idx)
757bf1df 2350 {
808f4dfe
DM
2351 /* If there's a mismatching declaration, the call stmt might
2352 not have enough args. Handle this case by leaving the
2353 rest of the params as uninitialized. */
2354 if (idx >= arg_svals->length ())
2355 break;
2356 const svalue *arg_sval = (*arg_svals)[idx];
2357 const region *parm_reg = get_lvalue (iter_parm, ctxt);
2358 set_value (parm_reg, arg_sval, ctxt);
757bf1df 2359
808f4dfe
DM
2360 /* Also do it for default SSA name (sharing the same value). */
2361 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2362 if (parm_default_ssa)
2363 {
2364 const region *defssa_reg = get_lvalue (parm_default_ssa, ctxt);
2365 set_value (defssa_reg, arg_sval, ctxt);
2366 }
757bf1df 2367 }
757bf1df 2368 }
808f4dfe 2369 else
757bf1df 2370 {
808f4dfe
DM
2371 /* Otherwise we have a top-level call within the analysis. The params
2372 have defined but unknown initial values.
2373 Anything they point to has escaped. */
2374 tree fndecl = fun->decl;
2375 for (tree iter_parm = DECL_ARGUMENTS (fndecl); iter_parm;
2376 iter_parm = DECL_CHAIN (iter_parm))
757bf1df 2377 {
808f4dfe
DM
2378 on_top_level_param (iter_parm, ctxt);
2379 tree parm_default_ssa = ssa_default_def (fun, iter_parm);
2380 if (parm_default_ssa)
2381 on_top_level_param (parm_default_ssa, ctxt);
757bf1df
DM
2382 }
2383 }
757bf1df 2384
808f4dfe 2385 return m_current_frame;
757bf1df
DM
2386}
2387
808f4dfe
DM
2388/* Get the function of the top-most frame in this region_model's stack.
2389 There must be such a frame. */
757bf1df 2390
808f4dfe
DM
2391function *
2392region_model::get_current_function () const
757bf1df 2393{
808f4dfe
DM
2394 const frame_region *frame = get_current_frame ();
2395 gcc_assert (frame);
2396 return frame->get_function ();
757bf1df
DM
2397}
2398
808f4dfe 2399/* Pop the topmost frame_region from this region_model's stack;
757bf1df 2400
808f4dfe
DM
2401 If RESULT_DST_REG is non-null, copy any return value from the frame
2402 into RESULT_DST_REG's region.
2403 If OUT_RESULT is non-null, copy any return value from the frame
2404 into *OUT_RESULT.
757bf1df 2405
808f4dfe
DM
2406 Purge the frame region and all its descendent regions.
2407 Convert any pointers that point into such regions into
2408 POISON_KIND_POPPED_STACK svalues. */
757bf1df 2409
808f4dfe
DM
2410void
2411region_model::pop_frame (const region *result_dst_reg,
2412 const svalue **out_result,
2413 region_model_context *ctxt)
2414{
2415 gcc_assert (m_current_frame);
757bf1df 2416
808f4dfe
DM
2417 /* Evaluate the result, within the callee frame. */
2418 const frame_region *frame_reg = m_current_frame;
2419 tree fndecl = m_current_frame->get_function ()->decl;
2420 tree result = DECL_RESULT (fndecl);
2421 if (result && TREE_TYPE (result) != void_type_node)
2422 {
2423 if (result_dst_reg)
2424 {
2425 /* Copy the result to RESULT_DST_REG. */
2426 copy_region (result_dst_reg,
2427 get_lvalue (result, ctxt),
2428 ctxt);
2429 }
2430 if (out_result)
2431 *out_result = get_rvalue (result, ctxt);
2432 }
757bf1df 2433
808f4dfe
DM
2434 /* Pop the frame. */
2435 m_current_frame = m_current_frame->get_calling_frame ();
757bf1df 2436
808f4dfe 2437 unbind_region_and_descendents (frame_reg,POISON_KIND_POPPED_STACK);
757bf1df
DM
2438}
2439
808f4dfe 2440/* Get the number of frames in this region_model's stack. */
757bf1df 2441
808f4dfe
DM
2442int
2443region_model::get_stack_depth () const
757bf1df 2444{
808f4dfe
DM
2445 const frame_region *frame = get_current_frame ();
2446 if (frame)
2447 return frame->get_stack_depth ();
2448 else
2449 return 0;
757bf1df
DM
2450}
2451
808f4dfe
DM
2452/* Get the frame_region with the given index within the stack.
2453 The frame_region must exist. */
757bf1df 2454
808f4dfe
DM
2455const frame_region *
2456region_model::get_frame_at_index (int index) const
757bf1df 2457{
808f4dfe
DM
2458 const frame_region *frame = get_current_frame ();
2459 gcc_assert (frame);
2460 gcc_assert (index >= 0);
2461 gcc_assert (index <= frame->get_index ());
2462 while (index != frame->get_index ())
2463 {
2464 frame = frame->get_calling_frame ();
2465 gcc_assert (frame);
2466 }
2467 return frame;
757bf1df
DM
2468}
2469
808f4dfe
DM
2470/* Unbind svalues for any regions in REG and below.
2471 Find any pointers to such regions; convert them to
2472 poisoned values of kind PKIND. */
757bf1df 2473
808f4dfe
DM
2474void
2475region_model::unbind_region_and_descendents (const region *reg,
2476 enum poison_kind pkind)
757bf1df 2477{
808f4dfe
DM
2478 /* Gather a set of base regions to be unbound. */
2479 hash_set<const region *> base_regs;
2480 for (store::cluster_map_t::iterator iter = m_store.begin ();
2481 iter != m_store.end (); ++iter)
757bf1df 2482 {
808f4dfe
DM
2483 const region *iter_base_reg = (*iter).first;
2484 if (iter_base_reg->descendent_of_p (reg))
2485 base_regs.add (iter_base_reg);
757bf1df 2486 }
808f4dfe
DM
2487 for (hash_set<const region *>::iterator iter = base_regs.begin ();
2488 iter != base_regs.end (); ++iter)
2489 m_store.purge_cluster (*iter);
757bf1df 2490
808f4dfe
DM
2491 /* Find any pointers to REG or its descendents; convert to poisoned. */
2492 poison_any_pointers_to_descendents (reg, pkind);
757bf1df
DM
2493}
2494
808f4dfe
DM
2495/* Implementation of BindingVisitor.
2496 Update the bound svalues for regions below REG to use poisoned
2497 values instead. */
757bf1df 2498
808f4dfe 2499struct bad_pointer_finder
757bf1df 2500{
808f4dfe
DM
2501 bad_pointer_finder (const region *reg, enum poison_kind pkind,
2502 region_model_manager *mgr)
2503 : m_reg (reg), m_pkind (pkind), m_mgr (mgr), m_count (0)
2504 {}
757bf1df 2505
808f4dfe
DM
2506 void on_binding (const binding_key *, const svalue *&sval)
2507 {
2508 if (const region_svalue *ptr_sval = sval->dyn_cast_region_svalue ())
2509 {
2510 const region *ptr_dst = ptr_sval->get_pointee ();
2511 /* Poison ptrs to descendents of REG, but not to REG itself,
2512 otherwise double-free detection doesn't work (since sm-state
2513 for "free" is stored on the original ptr svalue). */
2514 if (ptr_dst->descendent_of_p (m_reg)
2515 && ptr_dst != m_reg)
2516 {
2517 sval = m_mgr->get_or_create_poisoned_svalue (m_pkind,
2518 sval->get_type ());
2519 ++m_count;
2520 }
2521 }
2522 }
757bf1df 2523
808f4dfe
DM
2524 const region *m_reg;
2525 enum poison_kind m_pkind;
2526 region_model_manager *const m_mgr;
2527 int m_count;
2528};
757bf1df 2529
808f4dfe
DM
2530/* Find any pointers to REG or its descendents; convert them to
2531 poisoned values of kind PKIND.
2532 Return the number of pointers that were poisoned. */
757bf1df 2533
808f4dfe
DM
2534int
2535region_model::poison_any_pointers_to_descendents (const region *reg,
2536 enum poison_kind pkind)
2537{
2538 bad_pointer_finder bv (reg, pkind, m_mgr);
2539 m_store.for_each_binding (bv);
2540 return bv.m_count;
757bf1df
DM
2541}
2542
808f4dfe
DM
2543/* Attempt to merge THIS with OTHER_MODEL, writing the result
2544 to OUT_MODEL. Use POINT to distinguish values created as a
2545 result of merging. */
757bf1df 2546
808f4dfe
DM
2547bool
2548region_model::can_merge_with_p (const region_model &other_model,
2549 const program_point &point,
2550 region_model *out_model) const
757bf1df 2551{
808f4dfe
DM
2552 gcc_assert (out_model);
2553 gcc_assert (m_mgr == other_model.m_mgr);
2554 gcc_assert (m_mgr == out_model->m_mgr);
757bf1df 2555
808f4dfe
DM
2556 if (m_current_frame != other_model.m_current_frame)
2557 return false;
2558 out_model->m_current_frame = m_current_frame;
757bf1df 2559
808f4dfe 2560 model_merger m (this, &other_model, point, out_model);
757bf1df 2561
808f4dfe
DM
2562 if (!store::can_merge_p (&m_store, &other_model.m_store,
2563 &out_model->m_store, m_mgr->get_store_manager (),
2564 &m))
2565 return false;
2566
2567 /* Merge constraints. */
2568 constraint_manager::merge (*m_constraints,
2569 *other_model.m_constraints,
2570 out_model->m_constraints,
2571 m);
757bf1df 2572
808f4dfe 2573 return true;
757bf1df
DM
2574}
2575
2576/* Attempt to get the fndecl used at CALL, if known, or NULL_TREE
2577 otherwise. */
2578
2579tree
2580region_model::get_fndecl_for_call (const gcall *call,
2581 region_model_context *ctxt)
2582{
2583 tree fn_ptr = gimple_call_fn (call);
2584 if (fn_ptr == NULL_TREE)
2585 return NULL_TREE;
808f4dfe
DM
2586 const svalue *fn_ptr_sval = get_rvalue (fn_ptr, ctxt);
2587 if (const region_svalue *fn_ptr_ptr
2588 = fn_ptr_sval->dyn_cast_region_svalue ())
757bf1df 2589 {
808f4dfe
DM
2590 const region *reg = fn_ptr_ptr->get_pointee ();
2591 if (const function_region *fn_reg = reg->dyn_cast_function_region ())
757bf1df 2592 {
808f4dfe 2593 tree fn_decl = fn_reg->get_fndecl ();
0ba70d1b
DM
2594 cgraph_node *node = cgraph_node::get (fn_decl);
2595 if (!node)
2596 return NULL_TREE;
2597 const cgraph_node *ultimate_node = node->ultimate_alias_target ();
91f993b7
DM
2598 if (ultimate_node)
2599 return ultimate_node->decl;
757bf1df
DM
2600 }
2601 }
2602
2603 return NULL_TREE;
2604}
2605
808f4dfe 2606/* Would be much simpler to use a lambda here, if it were supported. */
757bf1df 2607
808f4dfe 2608struct append_ssa_names_cb_data
757bf1df 2609{
808f4dfe
DM
2610 const region_model *model;
2611 auto_vec<const decl_region *> *out;
2612};
757bf1df 2613
808f4dfe
DM
2614/* Populate *OUT with all decl_regions for SSA names in the current
2615 frame that have clusters within the store. */
757bf1df
DM
2616
2617void
808f4dfe
DM
2618region_model::
2619get_ssa_name_regions_for_current_frame (auto_vec<const decl_region *> *out)
2620 const
757bf1df 2621{
808f4dfe
DM
2622 append_ssa_names_cb_data data;
2623 data.model = this;
2624 data.out = out;
2625 m_store.for_each_cluster (append_ssa_names_cb, &data);
757bf1df
DM
2626}
2627
808f4dfe 2628/* Implementation detail of get_ssa_name_regions_for_current_frame. */
757bf1df 2629
808f4dfe
DM
2630void
2631region_model::append_ssa_names_cb (const region *base_reg,
2632 append_ssa_names_cb_data *cb_data)
757bf1df 2633{
808f4dfe
DM
2634 if (base_reg->get_parent_region () != cb_data->model->m_current_frame)
2635 return;
2636 if (const decl_region *decl_reg = base_reg->dyn_cast_decl_region ())
757bf1df 2637 {
808f4dfe
DM
2638 if (TREE_CODE (decl_reg->get_decl ()) == SSA_NAME)
2639 cb_data->out->safe_push (decl_reg);
757bf1df
DM
2640 }
2641}
2642
808f4dfe 2643/* Return a new region describing a heap-allocated block of memory. */
757bf1df 2644
808f4dfe
DM
2645const region *
2646region_model::create_region_for_heap_alloc (const svalue *size_in_bytes)
757bf1df 2647{
808f4dfe
DM
2648 const region *reg = m_mgr->create_region_for_heap_alloc ();
2649 record_dynamic_extents (reg, size_in_bytes);
2650 return reg;
757bf1df
DM
2651}
2652
808f4dfe
DM
2653/* Return a new region describing a block of memory allocated within the
2654 current frame. */
757bf1df 2655
808f4dfe
DM
2656const region *
2657region_model::create_region_for_alloca (const svalue *size_in_bytes)
757bf1df 2658{
808f4dfe
DM
2659 const region *reg = m_mgr->create_region_for_alloca (m_current_frame);
2660 record_dynamic_extents (reg, size_in_bytes);
2661 return reg;
757bf1df
DM
2662}
2663
808f4dfe
DM
2664/* Placeholder hook for recording that the size of REG is SIZE_IN_BYTES.
2665 Currently does nothing. */
757bf1df
DM
2666
2667void
808f4dfe
DM
2668region_model::
2669record_dynamic_extents (const region *reg ATTRIBUTE_UNUSED,
2670 const svalue *size_in_bytes ATTRIBUTE_UNUSED)
757bf1df
DM
2671{
2672}
2673
808f4dfe 2674/* struct model_merger. */
757bf1df 2675
808f4dfe 2676/* Dump a multiline representation of this merger to PP. */
757bf1df
DM
2677
2678void
808f4dfe 2679model_merger::dump_to_pp (pretty_printer *pp, bool simple) const
757bf1df 2680{
808f4dfe
DM
2681 pp_string (pp, "model A:");
2682 pp_newline (pp);
2683 m_model_a->dump_to_pp (pp, simple, true);
2684 pp_newline (pp);
757bf1df 2685
808f4dfe 2686 pp_string (pp, "model B:");
757bf1df 2687 pp_newline (pp);
808f4dfe 2688 m_model_b->dump_to_pp (pp, simple, true);
757bf1df
DM
2689 pp_newline (pp);
2690
808f4dfe 2691 pp_string (pp, "merged model:");
757bf1df 2692 pp_newline (pp);
808f4dfe 2693 m_merged_model->dump_to_pp (pp, simple, true);
757bf1df
DM
2694 pp_newline (pp);
2695}
2696
808f4dfe 2697/* Dump a multiline representation of this merger to FILE. */
757bf1df
DM
2698
2699void
808f4dfe 2700model_merger::dump (FILE *fp, bool simple) const
757bf1df
DM
2701{
2702 pretty_printer pp;
2703 pp_format_decoder (&pp) = default_tree_printer;
2704 pp_show_color (&pp) = pp_show_color (global_dc->printer);
2705 pp.buffer->stream = fp;
808f4dfe 2706 dump_to_pp (&pp, simple);
757bf1df
DM
2707 pp_flush (&pp);
2708}
2709
808f4dfe 2710/* Dump a multiline representation of this merger to stderr. */
757bf1df
DM
2711
2712DEBUG_FUNCTION void
808f4dfe 2713model_merger::dump (bool simple) const
757bf1df 2714{
808f4dfe 2715 dump (stderr, simple);
757bf1df
DM
2716}
2717
75038aa6
DM
2718} // namespace ana
2719
808f4dfe 2720/* Dump RMODEL fully to stderr (i.e. without summarization). */
757bf1df 2721
808f4dfe
DM
2722DEBUG_FUNCTION void
2723debug (const region_model &rmodel)
757bf1df 2724{
808f4dfe 2725 rmodel.dump (false);
757bf1df
DM
2726}
2727
808f4dfe 2728/* class engine. */
757bf1df 2729
808f4dfe 2730/* Dump the managed objects by class to LOGGER, and the per-class totals. */
757bf1df 2731
808f4dfe
DM
2732void
2733engine::log_stats (logger *logger) const
757bf1df 2734{
808f4dfe 2735 m_mgr.log_stats (logger, true);
757bf1df
DM
2736}
2737
75038aa6
DM
2738namespace ana {
2739
757bf1df
DM
2740#if CHECKING_P
2741
2742namespace selftest {
2743
8c08c983
DM
2744/* Build a constant tree of the given type from STR. */
2745
2746static tree
2747build_real_cst_from_string (tree type, const char *str)
2748{
2749 REAL_VALUE_TYPE real;
2750 real_from_string (&real, str);
2751 return build_real (type, real);
2752}
2753
2754/* Append various "interesting" constants to OUT (e.g. NaN). */
2755
2756static void
2757append_interesting_constants (auto_vec<tree> *out)
2758{
2759 out->safe_push (build_int_cst (integer_type_node, 0));
2760 out->safe_push (build_int_cst (integer_type_node, 42));
2761 out->safe_push (build_int_cst (unsigned_type_node, 0));
2762 out->safe_push (build_int_cst (unsigned_type_node, 42));
2763 out->safe_push (build_real_cst_from_string (float_type_node, "QNaN"));
2764 out->safe_push (build_real_cst_from_string (float_type_node, "-QNaN"));
2765 out->safe_push (build_real_cst_from_string (float_type_node, "SNaN"));
2766 out->safe_push (build_real_cst_from_string (float_type_node, "-SNaN"));
2767 out->safe_push (build_real_cst_from_string (float_type_node, "0.0"));
2768 out->safe_push (build_real_cst_from_string (float_type_node, "-0.0"));
2769 out->safe_push (build_real_cst_from_string (float_type_node, "Inf"));
2770 out->safe_push (build_real_cst_from_string (float_type_node, "-Inf"));
2771}
2772
2773/* Verify that tree_cmp is a well-behaved comparator for qsort, even
2774 if the underlying constants aren't comparable. */
2775
2776static void
2777test_tree_cmp_on_constants ()
2778{
2779 auto_vec<tree> csts;
2780 append_interesting_constants (&csts);
2781
2782 /* Try sorting every triple. */
2783 const unsigned num = csts.length ();
2784 for (unsigned i = 0; i < num; i++)
2785 for (unsigned j = 0; j < num; j++)
2786 for (unsigned k = 0; k < num; k++)
2787 {
2788 auto_vec<tree> v (3);
2789 v.quick_push (csts[i]);
2790 v.quick_push (csts[j]);
2791 v.quick_push (csts[k]);
2792 v.qsort (tree_cmp);
2793 }
2794}
2795
757bf1df
DM
2796/* Implementation detail of the ASSERT_CONDITION_* macros. */
2797
808f4dfe
DM
2798void
2799assert_condition (const location &loc,
2800 region_model &model,
2801 const svalue *lhs, tree_code op, const svalue *rhs,
2802 tristate expected)
2803{
2804 tristate actual = model.eval_condition (lhs, op, rhs);
2805 ASSERT_EQ_AT (loc, actual, expected);
2806}
2807
2808/* Implementation detail of the ASSERT_CONDITION_* macros. */
2809
757bf1df
DM
2810void
2811assert_condition (const location &loc,
2812 region_model &model,
2813 tree lhs, tree_code op, tree rhs,
2814 tristate expected)
2815{
2816 tristate actual = model.eval_condition (lhs, op, rhs, NULL);
2817 ASSERT_EQ_AT (loc, actual, expected);
2818}
2819
90f7c300
DM
2820/* Implementation detail of ASSERT_DUMP_TREE_EQ. */
2821
2822static void
2823assert_dump_tree_eq (const location &loc, tree t, const char *expected)
2824{
2825 auto_fix_quotes sentinel;
2826 pretty_printer pp;
2827 pp_format_decoder (&pp) = default_tree_printer;
2828 dump_tree (&pp, t);
2829 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
2830}
2831
2832/* Assert that dump_tree (T) is EXPECTED. */
2833
2834#define ASSERT_DUMP_TREE_EQ(T, EXPECTED) \
2835 SELFTEST_BEGIN_STMT \
2836 assert_dump_tree_eq ((SELFTEST_LOCATION), (T), (EXPECTED)); \
2837 SELFTEST_END_STMT
2838
757bf1df
DM
2839/* Implementation detail of ASSERT_DUMP_EQ. */
2840
2841static void
2842assert_dump_eq (const location &loc,
2843 const region_model &model,
2844 bool summarize,
2845 const char *expected)
2846{
2847 auto_fix_quotes sentinel;
2848 pretty_printer pp;
2849 pp_format_decoder (&pp) = default_tree_printer;
808f4dfe
DM
2850
2851 model.dump_to_pp (&pp, summarize, true);
757bf1df
DM
2852 ASSERT_STREQ_AT (loc, pp_formatted_text (&pp), expected);
2853}
2854
2855/* Assert that MODEL.dump_to_pp (SUMMARIZE) is EXPECTED. */
2856
2857#define ASSERT_DUMP_EQ(MODEL, SUMMARIZE, EXPECTED) \
2858 SELFTEST_BEGIN_STMT \
2859 assert_dump_eq ((SELFTEST_LOCATION), (MODEL), (SUMMARIZE), (EXPECTED)); \
2860 SELFTEST_END_STMT
2861
2862/* Smoketest for region_model::dump_to_pp. */
2863
2864static void
2865test_dump ()
2866{
808f4dfe
DM
2867 region_model_manager mgr;
2868 region_model model (&mgr);
757bf1df
DM
2869
2870 ASSERT_DUMP_EQ (model, false,
808f4dfe
DM
2871 "stack depth: 0\n"
2872 "m_called_unknown_fn: FALSE\n"
2873 "constraint_manager:\n"
2874 " equiv classes:\n"
2875 " constraints:\n");
2876 ASSERT_DUMP_EQ (model, true,
2877 "stack depth: 0\n"
2878 "m_called_unknown_fn: FALSE\n"
2879 "constraint_manager:\n"
757bf1df
DM
2880 " equiv classes:\n"
2881 " constraints:\n");
757bf1df
DM
2882}
2883
884d9141
DM
2884/* Helper function for selftests. Create a struct or union type named NAME,
2885 with the fields given by the FIELD_DECLS in FIELDS.
2886 If IS_STRUCT is true create a RECORD_TYPE (aka a struct), otherwise
2887 create a UNION_TYPE. */
2888
2889static tree
2890make_test_compound_type (const char *name, bool is_struct,
2891 const auto_vec<tree> *fields)
2892{
2893 tree t = make_node (is_struct ? RECORD_TYPE : UNION_TYPE);
2894 TYPE_NAME (t) = get_identifier (name);
2895 TYPE_SIZE (t) = 0;
2896
2897 tree fieldlist = NULL;
2898 int i;
2899 tree field;
2900 FOR_EACH_VEC_ELT (*fields, i, field)
2901 {
2902 gcc_assert (TREE_CODE (field) == FIELD_DECL);
2903 DECL_CONTEXT (field) = t;
2904 fieldlist = chainon (field, fieldlist);
2905 }
2906 fieldlist = nreverse (fieldlist);
2907 TYPE_FIELDS (t) = fieldlist;
2908
2909 layout_type (t);
2910 return t;
2911}
2912
a96f1c38
DM
2913/* Selftest fixture for creating the type "struct coord {int x; int y; };". */
2914
2915struct coord_test
2916{
2917 coord_test ()
2918 {
2919 auto_vec<tree> fields;
2920 m_x_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2921 get_identifier ("x"), integer_type_node);
2922 fields.safe_push (m_x_field);
2923 m_y_field = build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2924 get_identifier ("y"), integer_type_node);
2925 fields.safe_push (m_y_field);
2926 m_coord_type = make_test_compound_type ("coord", true, &fields);
2927 }
2928
2929 tree m_x_field;
2930 tree m_y_field;
2931 tree m_coord_type;
2932};
2933
808f4dfe 2934/* Verify usage of a struct. */
884d9141
DM
2935
2936static void
808f4dfe 2937test_struct ()
884d9141 2938{
a96f1c38
DM
2939 coord_test ct;
2940
2941 tree c = build_global_decl ("c", ct.m_coord_type);
2942 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
2943 c, ct.m_x_field, NULL_TREE);
2944 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
2945 c, ct.m_y_field, NULL_TREE);
884d9141
DM
2946
2947 tree int_17 = build_int_cst (integer_type_node, 17);
2948 tree int_m3 = build_int_cst (integer_type_node, -3);
2949
808f4dfe
DM
2950 region_model_manager mgr;
2951 region_model model (&mgr);
884d9141
DM
2952 model.set_value (c_x, int_17, NULL);
2953 model.set_value (c_y, int_m3, NULL);
2954
808f4dfe
DM
2955 /* Verify get_offset for "c.x". */
2956 {
2957 const region *c_x_reg = model.get_lvalue (c_x, NULL);
2958 region_offset offset = c_x_reg->get_offset ();
2959 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
2960 ASSERT_EQ (offset.get_bit_offset (), 0);
2961 }
2962
2963 /* Verify get_offset for "c.y". */
2964 {
2965 const region *c_y_reg = model.get_lvalue (c_y, NULL);
2966 region_offset offset = c_y_reg->get_offset ();
2967 ASSERT_EQ (offset.get_base_region (), model.get_lvalue (c, NULL));
2968 ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
2969 }
884d9141
DM
2970}
2971
808f4dfe 2972/* Verify usage of an array element. */
884d9141
DM
2973
2974static void
808f4dfe 2975test_array_1 ()
884d9141
DM
2976{
2977 tree tlen = size_int (10);
2978 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
2979
2980 tree a = build_global_decl ("a", arr_type);
2981
808f4dfe
DM
2982 region_model_manager mgr;
2983 region_model model (&mgr);
884d9141
DM
2984 tree int_0 = build_int_cst (integer_type_node, 0);
2985 tree a_0 = build4 (ARRAY_REF, char_type_node,
2986 a, int_0, NULL_TREE, NULL_TREE);
2987 tree char_A = build_int_cst (char_type_node, 'A');
2988 model.set_value (a_0, char_A, NULL);
884d9141
DM
2989}
2990
90f7c300
DM
2991/* Verify that region_model::get_representative_tree works as expected. */
2992
2993static void
2994test_get_representative_tree ()
2995{
808f4dfe
DM
2996 region_model_manager mgr;
2997
90f7c300
DM
2998 /* STRING_CST. */
2999 {
3000 tree string_cst = build_string (4, "foo");
808f4dfe
DM
3001 region_model m (&mgr);
3002 const svalue *str_sval = m.get_rvalue (string_cst, NULL);
3003 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
3004 ASSERT_EQ (rep, string_cst);
3005 }
3006
3007 /* String literal. */
3008 {
3009 tree string_cst_ptr = build_string_literal (4, "foo");
808f4dfe
DM
3010 region_model m (&mgr);
3011 const svalue *str_sval = m.get_rvalue (string_cst_ptr, NULL);
3012 tree rep = m.get_representative_tree (str_sval);
90f7c300
DM
3013 ASSERT_DUMP_TREE_EQ (rep, "&\"foo\"[0]");
3014 }
808f4dfe
DM
3015
3016 /* Value of an element within an array. */
3017 {
3018 tree tlen = size_int (10);
3019 tree arr_type = build_array_type (char_type_node, build_index_type (tlen));
3020 tree a = build_global_decl ("a", arr_type);
3021 placeholder_svalue test_sval (char_type_node, "test value");
3022
3023 /* Value of a[3]. */
3024 {
3025 test_region_model_context ctxt;
3026 region_model model (&mgr);
3027 tree int_3 = build_int_cst (integer_type_node, 3);
3028 tree a_3 = build4 (ARRAY_REF, char_type_node,
3029 a, int_3, NULL_TREE, NULL_TREE);
3030 const region *a_3_reg = model.get_lvalue (a_3, &ctxt);
3031 model.set_value (a_3_reg, &test_sval, &ctxt);
3032 tree rep = model.get_representative_tree (&test_sval);
3033 ASSERT_DUMP_TREE_EQ (rep, "a[3]");
3034 }
3035
3036 /* Value of a[0]. */
3037 {
3038 test_region_model_context ctxt;
3039 region_model model (&mgr);
3040 tree idx = build_int_cst (integer_type_node, 0);
3041 tree a_0 = build4 (ARRAY_REF, char_type_node,
3042 a, idx, NULL_TREE, NULL_TREE);
3043 const region *a_0_reg = model.get_lvalue (a_0, &ctxt);
3044 model.set_value (a_0_reg, &test_sval, &ctxt);
3045 tree rep = model.get_representative_tree (&test_sval);
3046 ASSERT_DUMP_TREE_EQ (rep, "a[0]");
3047 }
3048 }
3049
3050 /* Value of a field within a struct. */
3051 {
3052 coord_test ct;
3053
3054 tree c = build_global_decl ("c", ct.m_coord_type);
3055 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
3056 c, ct.m_x_field, NULL_TREE);
3057 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
3058 c, ct.m_y_field, NULL_TREE);
3059
3060 test_region_model_context ctxt;
3061
3062 /* Value of initial field. */
3063 {
3064 region_model m (&mgr);
3065 const region *c_x_reg = m.get_lvalue (c_x, &ctxt);
3066 placeholder_svalue test_sval_x (integer_type_node, "test x val");
3067 m.set_value (c_x_reg, &test_sval_x, &ctxt);
3068 tree rep = m.get_representative_tree (&test_sval_x);
3069 ASSERT_DUMP_TREE_EQ (rep, "c.x");
3070 }
3071
3072 /* Value of non-initial field. */
3073 {
3074 region_model m (&mgr);
3075 const region *c_y_reg = m.get_lvalue (c_y, &ctxt);
3076 placeholder_svalue test_sval_y (integer_type_node, "test y val");
3077 m.set_value (c_y_reg, &test_sval_y, &ctxt);
3078 tree rep = m.get_representative_tree (&test_sval_y);
3079 ASSERT_DUMP_TREE_EQ (rep, "c.y");
3080 }
3081 }
90f7c300
DM
3082}
3083
757bf1df 3084/* Verify that calling region_model::get_rvalue repeatedly on the same
808f4dfe 3085 tree constant retrieves the same svalue *. */
757bf1df
DM
3086
3087static void
3088test_unique_constants ()
3089{
3090 tree int_0 = build_int_cst (integer_type_node, 0);
3091 tree int_42 = build_int_cst (integer_type_node, 42);
3092
3093 test_region_model_context ctxt;
808f4dfe
DM
3094 region_model_manager mgr;
3095 region_model model (&mgr);
757bf1df
DM
3096 ASSERT_EQ (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_0, &ctxt));
3097 ASSERT_EQ (model.get_rvalue (int_42, &ctxt),
3098 model.get_rvalue (int_42, &ctxt));
3099 ASSERT_NE (model.get_rvalue (int_0, &ctxt), model.get_rvalue (int_42, &ctxt));
3100 ASSERT_EQ (ctxt.get_num_diagnostics (), 0);
757bf1df 3101
808f4dfe
DM
3102 /* A "(const int)42" will be a different tree from "(int)42)"... */
3103 tree const_int_type_node
3104 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
3105 tree const_int_42 = build_int_cst (const_int_type_node, 42);
3106 ASSERT_NE (int_42, const_int_42);
3107 /* It should have a different const_svalue. */
3108 const svalue *int_42_sval = model.get_rvalue (int_42, &ctxt);
3109 const svalue *const_int_42_sval = model.get_rvalue (const_int_42, &ctxt);
3110 ASSERT_NE (int_42_sval, const_int_42_sval);
3111 /* But they should compare as equal. */
3112 ASSERT_CONDITION_TRUE (model, int_42_sval, EQ_EXPR, const_int_42_sval);
3113 ASSERT_CONDITION_FALSE (model, int_42_sval, NE_EXPR, const_int_42_sval);
757bf1df
DM
3114}
3115
808f4dfe
DM
3116/* Verify that each type gets its own singleton unknown_svalue within a
3117 region_model_manager, and that NULL_TREE gets its own singleton. */
757bf1df
DM
3118
3119static void
808f4dfe 3120test_unique_unknowns ()
757bf1df 3121{
808f4dfe
DM
3122 region_model_manager mgr;
3123 const svalue *unknown_int
3124 = mgr.get_or_create_unknown_svalue (integer_type_node);
3125 /* Repeated calls with the same type should get the same "unknown"
3126 svalue. */
3127 const svalue *unknown_int_2
3128 = mgr.get_or_create_unknown_svalue (integer_type_node);
3129 ASSERT_EQ (unknown_int, unknown_int_2);
757bf1df 3130
808f4dfe
DM
3131 /* Different types (or the NULL type) should have different
3132 unknown_svalues. */
3133 const svalue *unknown_NULL_type = mgr.get_or_create_unknown_svalue (NULL);
3134 ASSERT_NE (unknown_NULL_type, unknown_int);
757bf1df 3135
808f4dfe
DM
3136 /* Repeated calls with NULL for the type should get the same "unknown"
3137 svalue. */
3138 const svalue *unknown_NULL_type_2 = mgr.get_or_create_unknown_svalue (NULL);
3139 ASSERT_EQ (unknown_NULL_type, unknown_NULL_type_2);
757bf1df
DM
3140}
3141
808f4dfe 3142/* Verify that initial_svalue are handled as expected. */
757bf1df 3143
808f4dfe
DM
3144static void
3145test_initial_svalue_folding ()
757bf1df 3146{
808f4dfe
DM
3147 region_model_manager mgr;
3148 tree x = build_global_decl ("x", integer_type_node);
3149 tree y = build_global_decl ("y", integer_type_node);
757bf1df 3150
808f4dfe
DM
3151 test_region_model_context ctxt;
3152 region_model model (&mgr);
3153 const svalue *x_init = model.get_rvalue (x, &ctxt);
3154 const svalue *y_init = model.get_rvalue (y, &ctxt);
3155 ASSERT_NE (x_init, y_init);
3156 const region *x_reg = model.get_lvalue (x, &ctxt);
3157 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
757bf1df 3158
808f4dfe 3159}
757bf1df 3160
808f4dfe 3161/* Verify that unary ops are folded as expected. */
757bf1df
DM
3162
3163static void
808f4dfe 3164test_unaryop_svalue_folding ()
757bf1df 3165{
808f4dfe 3166 region_model_manager mgr;
757bf1df
DM
3167 tree x = build_global_decl ("x", integer_type_node);
3168 tree y = build_global_decl ("y", integer_type_node);
3169
808f4dfe
DM
3170 test_region_model_context ctxt;
3171 region_model model (&mgr);
3172 const svalue *x_init = model.get_rvalue (x, &ctxt);
3173 const svalue *y_init = model.get_rvalue (y, &ctxt);
3174 const region *x_reg = model.get_lvalue (x, &ctxt);
3175 ASSERT_EQ (x_init, mgr.get_or_create_initial_value (x_reg));
3176
3177 /* "(int)x" -> "x". */
3178 ASSERT_EQ (x_init, mgr.get_or_create_cast (integer_type_node, x_init));
3179
3180 /* "(void *)x" -> something other than "x". */
3181 ASSERT_NE (x_init, mgr.get_or_create_cast (ptr_type_node, x_init));
3182
3183 /* "!(x == y)" -> "x != y". */
3184 ASSERT_EQ (mgr.get_or_create_unaryop
3185 (boolean_type_node, TRUTH_NOT_EXPR,
3186 mgr.get_or_create_binop (boolean_type_node, EQ_EXPR,
3187 x_init, y_init)),
3188 mgr.get_or_create_binop (boolean_type_node, NE_EXPR,
3189 x_init, y_init));
3190 /* "!(x > y)" -> "x <= y". */
3191 ASSERT_EQ (mgr.get_or_create_unaryop
3192 (boolean_type_node, TRUTH_NOT_EXPR,
3193 mgr.get_or_create_binop (boolean_type_node, GT_EXPR,
3194 x_init, y_init)),
3195 mgr.get_or_create_binop (boolean_type_node, LE_EXPR,
3196 x_init, y_init));
3197}
3198
3199/* Verify that binops on constant svalues are folded. */
757bf1df 3200
808f4dfe
DM
3201static void
3202test_binop_svalue_folding ()
3203{
3204#define NUM_CSTS 10
3205 tree cst_int[NUM_CSTS];
3206 region_model_manager mgr;
3207 const svalue *cst_sval[NUM_CSTS];
3208 for (int i = 0; i < NUM_CSTS; i++)
3209 {
3210 cst_int[i] = build_int_cst (integer_type_node, i);
3211 cst_sval[i] = mgr.get_or_create_constant_svalue (cst_int[i]);
3212 ASSERT_EQ (cst_sval[i]->get_kind (), SK_CONSTANT);
3213 ASSERT_EQ (cst_sval[i]->maybe_get_constant (), cst_int[i]);
3214 }
757bf1df 3215
808f4dfe
DM
3216 for (int i = 0; i < NUM_CSTS; i++)
3217 for (int j = 0; j < NUM_CSTS; j++)
3218 {
3219 if (i != j)
3220 ASSERT_NE (cst_sval[i], cst_sval[j]);
3221 if (i + j < NUM_CSTS)
3222 {
3223 const svalue *sum
3224 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
3225 cst_sval[i], cst_sval[j]);
3226 ASSERT_EQ (sum, cst_sval[i + j]);
3227 }
3228 if (i - j >= 0)
3229 {
3230 const svalue *difference
3231 = mgr.get_or_create_binop (integer_type_node, MINUS_EXPR,
3232 cst_sval[i], cst_sval[j]);
3233 ASSERT_EQ (difference, cst_sval[i - j]);
3234 }
3235 if (i * j < NUM_CSTS)
3236 {
3237 const svalue *product
3238 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
3239 cst_sval[i], cst_sval[j]);
3240 ASSERT_EQ (product, cst_sval[i * j]);
3241 }
3242 const svalue *eq = mgr.get_or_create_binop (integer_type_node, EQ_EXPR,
3243 cst_sval[i], cst_sval[j]);
3244 ASSERT_EQ (eq, i == j ? cst_sval[1] : cst_sval [0]);
3245 const svalue *neq = mgr.get_or_create_binop (integer_type_node, NE_EXPR,
3246 cst_sval[i], cst_sval[j]);
3247 ASSERT_EQ (neq, i != j ? cst_sval[1] : cst_sval [0]);
3248 // etc
3249 }
757bf1df 3250
808f4dfe 3251 tree x = build_global_decl ("x", integer_type_node);
757bf1df 3252
808f4dfe
DM
3253 test_region_model_context ctxt;
3254 region_model model (&mgr);
3255 const svalue *x_init = model.get_rvalue (x, &ctxt);
3256
3257 /* PLUS_EXPR folding. */
3258 const svalue *x_init_plus_zero
3259 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
3260 x_init, cst_sval[0]);
3261 ASSERT_EQ (x_init_plus_zero, x_init);
3262 const svalue *zero_plus_x_init
3263 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
3264 cst_sval[0], x_init);
3265 ASSERT_EQ (zero_plus_x_init, x_init);
3266
3267 /* MULT_EXPR folding. */
3268 const svalue *x_init_times_zero
3269 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
3270 x_init, cst_sval[0]);
3271 ASSERT_EQ (x_init_times_zero, cst_sval[0]);
3272 const svalue *zero_times_x_init
3273 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
3274 cst_sval[0], x_init);
3275 ASSERT_EQ (zero_times_x_init, cst_sval[0]);
3276
3277 const svalue *x_init_times_one
3278 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
3279 x_init, cst_sval[1]);
3280 ASSERT_EQ (x_init_times_one, x_init);
3281 const svalue *one_times_x_init
3282 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
3283 cst_sval[1], x_init);
3284 ASSERT_EQ (one_times_x_init, x_init);
3285
3286 // etc
3287 // TODO: do we want to use the match-and-simplify DSL for this?
3288
3289 /* Verify that binops put any constants on the RHS. */
3290 const svalue *four_times_x_init
3291 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
3292 cst_sval[4], x_init);
3293 const svalue *x_init_times_four
3294 = mgr.get_or_create_binop (integer_type_node, MULT_EXPR,
3295 x_init, cst_sval[4]);
3296 ASSERT_EQ (four_times_x_init, x_init_times_four);
3297 const binop_svalue *binop = four_times_x_init->dyn_cast_binop_svalue ();
3298 ASSERT_EQ (binop->get_op (), MULT_EXPR);
3299 ASSERT_EQ (binop->get_arg0 (), x_init);
3300 ASSERT_EQ (binop->get_arg1 (), cst_sval[4]);
3301
3302 /* Verify that ((x + 1) + 1) == (x + 2). */
3303 const svalue *x_init_plus_one
3304 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
3305 x_init, cst_sval[1]);
3306 const svalue *x_init_plus_two
3307 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
3308 x_init, cst_sval[2]);
3309 const svalue *x_init_plus_one_plus_one
3310 = mgr.get_or_create_binop (integer_type_node, PLUS_EXPR,
3311 x_init_plus_one, cst_sval[1]);
3312 ASSERT_EQ (x_init_plus_one_plus_one, x_init_plus_two);
3313}
3314
3315/* Verify that sub_svalues are folded as expected. */
757bf1df 3316
808f4dfe
DM
3317static void
3318test_sub_svalue_folding ()
3319{
3320 coord_test ct;
3321 tree c = build_global_decl ("c", ct.m_coord_type);
3322 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
3323 c, ct.m_x_field, NULL_TREE);
757bf1df 3324
808f4dfe
DM
3325 region_model_manager mgr;
3326 region_model model (&mgr);
3327 test_region_model_context ctxt;
3328 const region *c_x_reg = model.get_lvalue (c_x, &ctxt);
757bf1df 3329
808f4dfe
DM
3330 /* Verify that sub_svalue of "unknown" simply
3331 yields an unknown. */
757bf1df 3332
808f4dfe
DM
3333 const svalue *unknown = mgr.get_or_create_unknown_svalue (ct.m_coord_type);
3334 const svalue *sub = mgr.get_or_create_sub_svalue (TREE_TYPE (ct.m_x_field),
3335 unknown, c_x_reg);
3336 ASSERT_EQ (sub->get_kind (), SK_UNKNOWN);
3337 ASSERT_EQ (sub->get_type (), TREE_TYPE (ct.m_x_field));
757bf1df
DM
3338}
3339
808f4dfe 3340/* Test that region::descendent_of_p works as expected. */
757bf1df
DM
3341
3342static void
808f4dfe 3343test_descendent_of_p ()
757bf1df 3344{
808f4dfe
DM
3345 region_model_manager mgr;
3346 const region *stack = mgr.get_stack_region ();
3347 const region *heap = mgr.get_heap_region ();
3348 const region *code = mgr.get_code_region ();
3349 const region *globals = mgr.get_globals_region ();
757bf1df 3350
808f4dfe
DM
3351 /* descendent_of_p should return true when used on the region itself. */
3352 ASSERT_TRUE (stack->descendent_of_p (stack));
3353 ASSERT_FALSE (stack->descendent_of_p (heap));
3354 ASSERT_FALSE (stack->descendent_of_p (code));
3355 ASSERT_FALSE (stack->descendent_of_p (globals));
757bf1df 3356
808f4dfe
DM
3357 tree x = build_global_decl ("x", integer_type_node);
3358 const region *x_reg = mgr.get_region_for_global (x);
3359 ASSERT_TRUE (x_reg->descendent_of_p (globals));
757bf1df 3360
808f4dfe
DM
3361 /* A cast_region should be a descendent of the original region. */
3362 const region *cast_reg = mgr.get_cast_region (x_reg, ptr_type_node);
3363 ASSERT_TRUE (cast_reg->descendent_of_p (x_reg));
757bf1df
DM
3364}
3365
3366/* Verify that simple assignments work as expected. */
3367
3368static void
3369test_assignment ()
3370{
3371 tree int_0 = build_int_cst (integer_type_node, 0);
3372 tree x = build_global_decl ("x", integer_type_node);
3373 tree y = build_global_decl ("y", integer_type_node);
3374
3375 /* "x == 0", then use of y, then "y = 0;". */
808f4dfe
DM
3376 region_model_manager mgr;
3377 region_model model (&mgr);
757bf1df
DM
3378 ADD_SAT_CONSTRAINT (model, x, EQ_EXPR, int_0);
3379 ASSERT_CONDITION_UNKNOWN (model, y, EQ_EXPR, int_0);
3380 model.set_value (model.get_lvalue (y, NULL),
3381 model.get_rvalue (int_0, NULL),
3382 NULL);
3383 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, int_0);
3384 ASSERT_CONDITION_TRUE (model, y, EQ_EXPR, x);
757bf1df
DM
3385}
3386
a96f1c38
DM
3387/* Verify that compound assignments work as expected. */
3388
3389static void
3390test_compound_assignment ()
3391{
3392 coord_test ct;
3393
3394 tree c = build_global_decl ("c", ct.m_coord_type);
3395 tree c_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
3396 c, ct.m_x_field, NULL_TREE);
3397 tree c_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
3398 c, ct.m_y_field, NULL_TREE);
3399 tree d = build_global_decl ("d", ct.m_coord_type);
3400 tree d_x = build3 (COMPONENT_REF, TREE_TYPE (ct.m_x_field),
3401 d, ct.m_x_field, NULL_TREE);
3402 tree d_y = build3 (COMPONENT_REF, TREE_TYPE (ct.m_y_field),
3403 d, ct.m_y_field, NULL_TREE);
3404
3405 tree int_17 = build_int_cst (integer_type_node, 17);
3406 tree int_m3 = build_int_cst (integer_type_node, -3);
3407
808f4dfe
DM
3408 region_model_manager mgr;
3409 region_model model (&mgr);
a96f1c38
DM
3410 model.set_value (c_x, int_17, NULL);
3411 model.set_value (c_y, int_m3, NULL);
3412
a96f1c38
DM
3413 /* Copy c to d. */
3414 model.copy_region (model.get_lvalue (d, NULL), model.get_lvalue (c, NULL),
3415 NULL);
3416 /* Check that the fields have the same svalues. */
3417 ASSERT_EQ (model.get_rvalue (c_x, NULL), model.get_rvalue (d_x, NULL));
3418 ASSERT_EQ (model.get_rvalue (c_y, NULL), model.get_rvalue (d_y, NULL));
3419}
3420
757bf1df
DM
3421/* Verify the details of pushing and popping stack frames. */
3422
3423static void
3424test_stack_frames ()
3425{
3426 tree int_42 = build_int_cst (integer_type_node, 42);
3427 tree int_10 = build_int_cst (integer_type_node, 10);
3428 tree int_5 = build_int_cst (integer_type_node, 5);
3429 tree int_0 = build_int_cst (integer_type_node, 0);
3430
3431 auto_vec <tree> param_types;
3432 tree parent_fndecl = make_fndecl (integer_type_node,
3433 "parent_fn",
3434 param_types);
3435 allocate_struct_function (parent_fndecl, true);
3436
3437 tree child_fndecl = make_fndecl (integer_type_node,
3438 "child_fn",
3439 param_types);
3440 allocate_struct_function (child_fndecl, true);
3441
3442 /* "a" and "b" in the parent frame. */
3443 tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3444 get_identifier ("a"),
3445 integer_type_node);
3446 tree b = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3447 get_identifier ("b"),
3448 integer_type_node);
3449 /* "x" and "y" in a child frame. */
3450 tree x = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3451 get_identifier ("x"),
3452 integer_type_node);
3453 tree y = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3454 get_identifier ("y"),
3455 integer_type_node);
3456
3457 /* "p" global. */
3458 tree p = build_global_decl ("p", ptr_type_node);
3459
3460 /* "q" global. */
3461 tree q = build_global_decl ("q", ptr_type_node);
3462
808f4dfe 3463 region_model_manager mgr;
757bf1df 3464 test_region_model_context ctxt;
808f4dfe 3465 region_model model (&mgr);
757bf1df
DM
3466
3467 /* Push stack frame for "parent_fn". */
808f4dfe
DM
3468 const region *parent_frame_reg
3469 = model.push_frame (DECL_STRUCT_FUNCTION (parent_fndecl),
3470 NULL, &ctxt);
3471 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
3472 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
3473 const region *a_in_parent_reg = model.get_lvalue (a, &ctxt);
3474 model.set_value (a_in_parent_reg,
3475 model.get_rvalue (int_42, &ctxt),
3476 &ctxt);
3477 ASSERT_EQ (a_in_parent_reg->maybe_get_frame_region (), parent_frame_reg);
3478
757bf1df
DM
3479 model.add_constraint (b, LT_EXPR, int_10, &ctxt);
3480 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
3481 tristate (tristate::TS_TRUE));
3482
3483 /* Push stack frame for "child_fn". */
808f4dfe 3484 const region *child_frame_reg
757bf1df 3485 = model.push_frame (DECL_STRUCT_FUNCTION (child_fndecl), NULL, &ctxt);
808f4dfe
DM
3486 ASSERT_EQ (model.get_current_frame (), child_frame_reg);
3487 ASSERT_TRUE (model.region_exists_p (child_frame_reg));
3488 const region *x_in_child_reg = model.get_lvalue (x, &ctxt);
3489 model.set_value (x_in_child_reg,
3490 model.get_rvalue (int_0, &ctxt),
3491 &ctxt);
3492 ASSERT_EQ (x_in_child_reg->maybe_get_frame_region (), child_frame_reg);
3493
757bf1df
DM
3494 model.add_constraint (y, NE_EXPR, int_5, &ctxt);
3495 ASSERT_EQ (model.eval_condition (y, NE_EXPR, int_5, &ctxt),
3496 tristate (tristate::TS_TRUE));
3497
3498 /* Point a global pointer at a local in the child frame: p = &x. */
808f4dfe
DM
3499 const region *p_in_globals_reg = model.get_lvalue (p, &ctxt);
3500 model.set_value (p_in_globals_reg,
3501 mgr.get_ptr_svalue (ptr_type_node, x_in_child_reg),
757bf1df 3502 &ctxt);
808f4dfe 3503 ASSERT_EQ (p_in_globals_reg->maybe_get_frame_region (), NULL);
757bf1df
DM
3504
3505 /* Point another global pointer at p: q = &p. */
808f4dfe
DM
3506 const region *q_in_globals_reg = model.get_lvalue (q, &ctxt);
3507 model.set_value (q_in_globals_reg,
3508 mgr.get_ptr_svalue (ptr_type_node, p_in_globals_reg),
757bf1df
DM
3509 &ctxt);
3510
808f4dfe
DM
3511 /* Test region::descendent_of_p. */
3512 ASSERT_TRUE (child_frame_reg->descendent_of_p (child_frame_reg));
3513 ASSERT_TRUE (x_in_child_reg->descendent_of_p (child_frame_reg));
3514 ASSERT_FALSE (a_in_parent_reg->descendent_of_p (child_frame_reg));
757bf1df
DM
3515
3516 /* Pop the "child_fn" frame from the stack. */
808f4dfe
DM
3517 model.pop_frame (NULL, NULL, &ctxt);
3518 ASSERT_FALSE (model.region_exists_p (child_frame_reg));
3519 ASSERT_TRUE (model.region_exists_p (parent_frame_reg));
757bf1df
DM
3520
3521 /* Verify that p (which was pointing at the local "x" in the popped
3522 frame) has been poisoned. */
808f4dfe 3523 const svalue *new_p_sval = model.get_rvalue (p, &ctxt);
757bf1df
DM
3524 ASSERT_EQ (new_p_sval->get_kind (), SK_POISONED);
3525 ASSERT_EQ (new_p_sval->dyn_cast_poisoned_svalue ()->get_poison_kind (),
3526 POISON_KIND_POPPED_STACK);
3527
3528 /* Verify that q still points to p, in spite of the region
3529 renumbering. */
808f4dfe 3530 const svalue *new_q_sval = model.get_rvalue (q, &ctxt);
757bf1df
DM
3531 ASSERT_EQ (new_q_sval->get_kind (), SK_REGION);
3532 ASSERT_EQ (new_q_sval->dyn_cast_region_svalue ()->get_pointee (),
3533 model.get_lvalue (p, &ctxt));
3534
3535 /* Verify that top of stack has been updated. */
808f4dfe 3536 ASSERT_EQ (model.get_current_frame (), parent_frame_reg);
757bf1df
DM
3537
3538 /* Verify locals in parent frame. */
3539 /* Verify "a" still has its value. */
808f4dfe 3540 const svalue *new_a_sval = model.get_rvalue (a, &ctxt);
757bf1df
DM
3541 ASSERT_EQ (new_a_sval->get_kind (), SK_CONSTANT);
3542 ASSERT_EQ (new_a_sval->dyn_cast_constant_svalue ()->get_constant (),
3543 int_42);
3544 /* Verify "b" still has its constraint. */
3545 ASSERT_EQ (model.eval_condition (b, LT_EXPR, int_10, &ctxt),
3546 tristate (tristate::TS_TRUE));
3547}
3548
3549/* Verify that get_representative_path_var works as expected, that
808f4dfe 3550 we can map from regions to parms and back within a recursive call
757bf1df
DM
3551 stack. */
3552
3553static void
3554test_get_representative_path_var ()
3555{
3556 auto_vec <tree> param_types;
3557 tree fndecl = make_fndecl (integer_type_node,
3558 "factorial",
3559 param_types);
3560 allocate_struct_function (fndecl, true);
3561
3562 /* Parm "n". */
3563 tree n = build_decl (UNKNOWN_LOCATION, PARM_DECL,
3564 get_identifier ("n"),
3565 integer_type_node);
3566
808f4dfe
DM
3567 region_model_manager mgr;
3568 test_region_model_context ctxt;
3569 region_model model (&mgr);
757bf1df
DM
3570
3571 /* Push 5 stack frames for "factorial", each with a param */
808f4dfe
DM
3572 auto_vec<const region *> parm_regs;
3573 auto_vec<const svalue *> parm_svals;
757bf1df
DM
3574 for (int depth = 0; depth < 5; depth++)
3575 {
808f4dfe
DM
3576 const region *frame_n_reg
3577 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl), NULL, &ctxt);
3578 const region *parm_n_reg = model.get_lvalue (path_var (n, depth), &ctxt);
3579 parm_regs.safe_push (parm_n_reg);
757bf1df 3580
808f4dfe
DM
3581 ASSERT_EQ (parm_n_reg->get_parent_region (), frame_n_reg);
3582 const svalue *sval_n = mgr.get_or_create_initial_value (parm_n_reg);
3583 parm_svals.safe_push (sval_n);
757bf1df
DM
3584 }
3585
3586 /* Verify that we can recognize that the regions are the parms,
3587 at every depth. */
3588 for (int depth = 0; depth < 5; depth++)
3589 {
808f4dfe
DM
3590 {
3591 svalue_set visited;
3592 ASSERT_EQ (model.get_representative_path_var (parm_regs[depth],
3593 &visited),
3594 path_var (n, depth + 1));
3595 }
757bf1df
DM
3596 /* ...and that we can lookup lvalues for locals for all frames,
3597 not just the top. */
3598 ASSERT_EQ (model.get_lvalue (path_var (n, depth), NULL),
808f4dfe 3599 parm_regs[depth]);
757bf1df 3600 /* ...and that we can locate the svalues. */
808f4dfe
DM
3601 {
3602 svalue_set visited;
3603 ASSERT_EQ (model.get_representative_path_var (parm_svals[depth],
3604 &visited),
3605 path_var (n, depth + 1));
3606 }
757bf1df
DM
3607 }
3608}
3609
808f4dfe 3610/* Ensure that region_model::operator== works as expected. */
757bf1df
DM
3611
3612static void
808f4dfe 3613test_equality_1 ()
757bf1df 3614{
808f4dfe
DM
3615 tree int_42 = build_int_cst (integer_type_node, 42);
3616 tree int_17 = build_int_cst (integer_type_node, 17);
757bf1df 3617
808f4dfe
DM
3618/* Verify that "empty" region_model instances are equal to each other. */
3619 region_model_manager mgr;
3620 region_model model0 (&mgr);
3621 region_model model1 (&mgr);
757bf1df 3622 ASSERT_EQ (model0, model1);
808f4dfe
DM
3623
3624 /* Verify that setting state in model1 makes the models non-equal. */
3625 tree x = build_global_decl ("x", integer_type_node);
3626 model0.set_value (x, int_42, NULL);
3627 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
3628 ASSERT_NE (model0, model1);
3629
3630 /* Verify the copy-ctor. */
3631 region_model model2 (model0);
3632 ASSERT_EQ (model0, model2);
3633 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
3634 ASSERT_NE (model1, model2);
3635
3636 /* Verify that models obtained from copy-ctor are independently editable
3637 w/o affecting the original model. */
3638 model2.set_value (x, int_17, NULL);
3639 ASSERT_NE (model0, model2);
3640 ASSERT_EQ (model2.get_rvalue (x, NULL)->maybe_get_constant (), int_17);
3641 ASSERT_EQ (model0.get_rvalue (x, NULL)->maybe_get_constant (), int_42);
757bf1df
DM
3642}
3643
3644/* Verify that region models for
3645 x = 42; y = 113;
3646 and
3647 y = 113; x = 42;
808f4dfe 3648 are equal. */
757bf1df
DM
3649
3650static void
3651test_canonicalization_2 ()
3652{
3653 tree int_42 = build_int_cst (integer_type_node, 42);
3654 tree int_113 = build_int_cst (integer_type_node, 113);
3655 tree x = build_global_decl ("x", integer_type_node);
3656 tree y = build_global_decl ("y", integer_type_node);
3657
808f4dfe
DM
3658 region_model_manager mgr;
3659 region_model model0 (&mgr);
757bf1df
DM
3660 model0.set_value (model0.get_lvalue (x, NULL),
3661 model0.get_rvalue (int_42, NULL),
3662 NULL);
3663 model0.set_value (model0.get_lvalue (y, NULL),
3664 model0.get_rvalue (int_113, NULL),
3665 NULL);
3666
808f4dfe 3667 region_model model1 (&mgr);
757bf1df
DM
3668 model1.set_value (model1.get_lvalue (y, NULL),
3669 model1.get_rvalue (int_113, NULL),
3670 NULL);
3671 model1.set_value (model1.get_lvalue (x, NULL),
3672 model1.get_rvalue (int_42, NULL),
3673 NULL);
3674
757bf1df
DM
3675 ASSERT_EQ (model0, model1);
3676}
3677
3678/* Verify that constraints for
3679 x > 3 && y > 42
3680 and
3681 y > 42 && x > 3
3682 are equal after canonicalization. */
3683
3684static void
3685test_canonicalization_3 ()
3686{
3687 tree int_3 = build_int_cst (integer_type_node, 3);
3688 tree int_42 = build_int_cst (integer_type_node, 42);
3689 tree x = build_global_decl ("x", integer_type_node);
3690 tree y = build_global_decl ("y", integer_type_node);
3691
808f4dfe
DM
3692 region_model_manager mgr;
3693 region_model model0 (&mgr);
757bf1df
DM
3694 model0.add_constraint (x, GT_EXPR, int_3, NULL);
3695 model0.add_constraint (y, GT_EXPR, int_42, NULL);
3696
808f4dfe 3697 region_model model1 (&mgr);
757bf1df
DM
3698 model1.add_constraint (y, GT_EXPR, int_42, NULL);
3699 model1.add_constraint (x, GT_EXPR, int_3, NULL);
3700
808f4dfe
DM
3701 model0.canonicalize ();
3702 model1.canonicalize ();
757bf1df
DM
3703 ASSERT_EQ (model0, model1);
3704}
3705
8c08c983
DM
3706/* Verify that we can canonicalize a model containing NaN and other real
3707 constants. */
3708
3709static void
3710test_canonicalization_4 ()
3711{
3712 auto_vec<tree> csts;
3713 append_interesting_constants (&csts);
3714
808f4dfe
DM
3715 region_model_manager mgr;
3716 region_model model (&mgr);
8c08c983
DM
3717
3718 unsigned i;
3719 tree cst;
3720 FOR_EACH_VEC_ELT (csts, i, cst)
3721 model.get_rvalue (cst, NULL);
3722
808f4dfe 3723 model.canonicalize ();
8c08c983
DM
3724}
3725
757bf1df
DM
/* Assert that if we have two region_model instances
   with values VAL_A and VAL_B for EXPR that they are
   mergable.  Write the merged model to *OUT_MERGED_MODEL,
   and the merged svalue ptr to *OUT_MERGED_SVALUE.
   If VAL_A or VAL_B are NULL_TREE, don't populate EXPR
   for that region_model.
   Note that *OUT_MERGED_MODEL supplies the region_model_manager used
   for the two temporary input models.  */

static void
assert_region_models_merge (tree expr, tree val_a, tree val_b,
			    region_model *out_merged_model,
			    const svalue **out_merged_svalue)
{
  program_point point (program_point::origin ());
  test_region_model_context ctxt;
  region_model_manager *mgr = out_merged_model->get_manager ();
  region_model model0 (mgr);
  region_model model1 (mgr);
  /* Bind EXPR in each input model only when a value was supplied.  */
  if (val_a)
    model0.set_value (model0.get_lvalue (expr, &ctxt),
		      model0.get_rvalue (val_a, &ctxt),
		      &ctxt);
  if (val_b)
    model1.set_value (model1.get_lvalue (expr, &ctxt),
		      model1.get_rvalue (val_b, &ctxt),
		      &ctxt);

  /* They should be mergeable.  */
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, out_merged_model));
  *out_merged_svalue = out_merged_model->get_rvalue (expr, &ctxt);
}
3756
/* Verify that we can merge region_model instances.  */

static void
test_state_merging ()
{
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_113 = build_int_cst (integer_type_node, 113);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree p = build_global_decl ("p", ptr_type_node);

  tree addr_of_y = build1 (ADDR_EXPR, ptr_type_node, y);
  tree addr_of_z = build1 (ADDR_EXPR, ptr_type_node, z);

  /* A function decl is needed so that frames can be pushed for the
     PARM_DECL and local-pointer scenarios below.  */
  auto_vec <tree> param_types;
  tree test_fndecl = make_fndecl (integer_type_node, "test_fn", param_types);
  allocate_struct_function (test_fndecl, true);

  /* Param "a".  */
  tree a = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("a"),
		       integer_type_node);
  tree addr_of_a = build1 (ADDR_EXPR, ptr_type_node, a);

  /* Param "q", a pointer.  */
  tree q = build_decl (UNKNOWN_LOCATION, PARM_DECL,
		       get_identifier ("q"),
		       ptr_type_node);

  program_point point (program_point::origin ());
  region_model_manager mgr;

  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    /* Verify empty models can be merged.  */
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge two contradictory constraints on the
     value for a global.  */
  /* TODO: verify that the merged model doesn't have a value for
     the global  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    region_model merged (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    /* The merged model must differ from both inputs, since neither
       constraint can survive.  */
    ASSERT_NE (model0, merged);
    ASSERT_NE (model1, merged);
  }

  /* Verify handling of a PARM_DECL.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    ASSERT_EQ (model0.get_stack_depth (), 0);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);
    ASSERT_EQ (model0.get_stack_depth (), 1);
    model1.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, &ctxt);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (a, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "a" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (a, &ctxt), &test_sval);
  }

  /* Verify handling of a global.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    region_model model1 (&mgr);

    placeholder_svalue test_sval (integer_type_node, "test sval");
    model0.set_value (model0.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    model1.set_value (model1.get_lvalue (x, &ctxt), &test_sval, &ctxt);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
    /* In particular, "x" should have the placeholder value.  */
    ASSERT_EQ (merged.get_rvalue (x, &ctxt), &test_sval);
  }

  /* Use global-handling to verify various combinations of values.  */

  /* Two equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_42, &merged, &merged_x_sval);

    /* In particular, there should be a constant value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_CONSTANT);
    ASSERT_EQ (merged_x_sval->dyn_cast_constant_svalue ()->get_constant (),
	       int_42);
  }

  /* Two non-equal constant values.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, int_113, &merged, &merged_x_sval);

    /* In particular, there should be a "widening" value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_WIDENING);
  }

  /* Initial and constant.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, NULL_TREE, int_113, &merged,
				&merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Constant and initial.  */
  {
    region_model merged (&mgr);
    const svalue *merged_x_sval;
    assert_region_models_merge (x, int_42, NULL_TREE, &merged,
				&merged_x_sval);

    /* In particular, there should be an unknown value for "x".  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Unknown and constant.  */
  // TODO

  /* Pointers: NULL and NULL.  */
  // TODO

  /* Pointers: NULL and non-NULL.  */
  // TODO

  /* Pointers: non-NULL and non-NULL: ptr to a local.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (p, NULL),
		      model0.get_rvalue (addr_of_a, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a global.  */
  {
    region_model merged (&mgr);
    /* p == &y in both input models.  */
    const svalue *merged_p_sval;
    assert_region_models_merge (p, addr_of_y, addr_of_y, &merged,
				&merged_p_sval);

    /* We should get p == &y in the merged model.  */
    ASSERT_EQ (merged_p_sval->get_kind (), SK_REGION);
    const region_svalue *merged_p_ptr
      = merged_p_sval->dyn_cast_region_svalue ();
    const region *merged_p_star_reg = merged_p_ptr->get_pointee ();
    ASSERT_EQ (merged_p_star_reg, merged.get_lvalue (y, NULL));
  }

  /* Pointers: non-NULL ptrs to different globals: should be unknown.  */
  {
    region_model merged (&mgr);
    /* x == &y vs x == &z in the input models; these are actually casts
       of the ptrs to "int".  */
    const svalue *merged_x_sval;
    // TODO:
    assert_region_models_merge (x, addr_of_y, addr_of_z, &merged,
				&merged_x_sval);

    /* We should get x == unknown in the merged model.  */
    ASSERT_EQ (merged_x_sval->get_kind (), SK_UNKNOWN);
  }

  /* Pointers: non-NULL and non-NULL: ptr to a heap region.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    tree size = build_int_cst (integer_type_node, 1024);
    const svalue *size_sval = mgr.get_or_create_constant_svalue (size);
    const region *new_reg = model0.create_region_for_heap_alloc (size_sval);
    const svalue *ptr_sval = mgr.get_ptr_svalue (ptr_type_node, new_reg);
    model0.set_value (model0.get_lvalue (p, &ctxt),
		      ptr_sval, &ctxt);

    region_model model1 (model0);

    ASSERT_EQ (model0, model1);

    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

    /* The merged model ought to be identical.  */
    ASSERT_EQ (model0, merged);
  }

  /* Two regions sharing the same placeholder svalue should continue sharing
     it after self-merger.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);
    placeholder_svalue placeholder_sval (integer_type_node, "test");
    model0.set_value (model0.get_lvalue (x, &ctxt),
		      &placeholder_sval, &ctxt);
    model0.set_value (model0.get_lvalue (y, &ctxt), &placeholder_sval, &ctxt);
    region_model model1 (model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);

    /* In particular, we should have x == y.  */
    ASSERT_EQ (merged.eval_condition (x, EQ_EXPR, y, &ctxt),
	       tristate (tristate::TS_TRUE));
  }

  /* Contradictory constraints (x == 42 vs x != 42) should still merge.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* ...likewise when one side carries an additional constraint.  */
  {
    region_model model0 (&mgr);
    region_model model1 (&mgr);
    test_region_model_context ctxt;
    model0.add_constraint (x, EQ_EXPR, int_42, &ctxt);
    model1.add_constraint (x, NE_EXPR, int_42, &ctxt);
    model1.add_constraint (x, EQ_EXPR, int_113, &ctxt);
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  // TODO: what can't we merge? need at least one such test

  /* TODO: various things
     - heap regions
     - value merging:
       - every combination, but in particular
	 - pairs of regions
   */

  /* Views.  */
  {
    test_region_model_context ctxt;
    region_model model0 (&mgr);

    const region *x_reg = model0.get_lvalue (x, &ctxt);
    const region *x_as_ptr = mgr.get_cast_region (x_reg, ptr_type_node);
    model0.set_value (x_as_ptr, model0.get_rvalue (addr_of_y, &ctxt), &ctxt);

    region_model model1 (model0);
    ASSERT_EQ (model1, model0);

    /* They should be mergeable, and the result should be the same.  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
  }

  /* Verify that we can merge a model in which a local in an older stack
     frame points to a local in a more recent stack frame.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    const region *q_in_first_frame = model0.get_lvalue (q, NULL);

    /* Push a second frame.  */
    const region *reg_2nd_frame
      = model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);

    /* Have a pointer in the older frame point to a local in the
       more recent frame.  */
    const svalue *sval_ptr = model0.get_rvalue (addr_of_a, NULL);
    model0.set_value (q_in_first_frame, sval_ptr, NULL);

    /* Verify that it's pointing at the newer frame.  */
    const region *reg_pointee
      = sval_ptr->dyn_cast_region_svalue ()->get_pointee ();
    ASSERT_EQ (reg_pointee->get_parent_region (), reg_2nd_frame);

    model0.canonicalize ();

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    merged.canonicalize ();
    ASSERT_EQ (model0, merged);
  }

  /* Verify that we can merge a model in which a local points to a global.  */
  {
    region_model model0 (&mgr);
    model0.push_frame (DECL_STRUCT_FUNCTION (test_fndecl), NULL, NULL);
    model0.set_value (model0.get_lvalue (q, NULL),
		      model0.get_rvalue (addr_of_y, NULL), NULL);

    region_model model1 (model0);
    ASSERT_EQ (model0, model1);

    /* They should be mergeable, and the result should be the same
       (after canonicalization, at least).  */
    region_model merged (&mgr);
    ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));
    ASSERT_EQ (model0, merged);
  }
}
4097
/* Verify that constraints are correctly merged when merging region_model
   instances.  */

static void
test_constraint_merging ()
{
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_5 = build_int_cst (integer_type_node, 5);
  tree x = build_global_decl ("x", integer_type_node);
  tree y = build_global_decl ("y", integer_type_node);
  tree z = build_global_decl ("z", integer_type_node);
  tree n = build_global_decl ("n", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: 0 <= (x == y) < n.  */
  region_model model0 (&mgr);
  model0.add_constraint (x, EQ_EXPR, y, &ctxt);
  model0.add_constraint (x, GE_EXPR, int_0, NULL);
  model0.add_constraint (x, LT_EXPR, n, NULL);

  /* model1: z != 5 && (0 <= x < n).  */
  region_model model1 (&mgr);
  model1.add_constraint (z, NE_EXPR, int_5, NULL);
  model1.add_constraint (x, GE_EXPR, int_0, NULL);
  model1.add_constraint (x, LT_EXPR, n, NULL);

  /* They should be mergeable; the merged constraints should
     be: (0 <= x < n).  */
  program_point point (program_point::origin ());
  region_model merged (&mgr);
  ASSERT_TRUE (model0.can_merge_with_p (model1, point, &merged));

  /* Constraints common to both inputs must survive the merge.  */
  ASSERT_EQ (merged.eval_condition (x, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, n, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Constraints that held in only one input (z != 5, x == y) must
     have been dropped.  */
  ASSERT_EQ (merged.eval_condition (z, NE_EXPR, int_5, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
  ASSERT_EQ (merged.eval_condition (x, LT_EXPR, y, &ctxt),
	     tristate (tristate::TS_UNKNOWN));
}
4142
808f4dfe
DM
/* Verify that widening_svalue::eval_condition_without_cm works as
   expected.  */

static void
test_widening_constraints ()
{
  program_point point (program_point::origin ());
  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_m1 = build_int_cst (integer_type_node, -1);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  region_model_manager mgr;
  test_region_model_context ctxt;
  const svalue *int_0_sval = mgr.get_or_create_constant_svalue (int_0);
  const svalue *int_1_sval = mgr.get_or_create_constant_svalue (int_1);
  /* A value that was 0 on the first iteration and 1 on the next,
     hence widening in the ascending direction: conceptually >= 0,
     but with no known upper bound.  */
  const svalue *w_zero_then_one_sval
    = mgr.get_or_create_widening_svalue (integer_type_node, point,
					 int_0_sval, int_1_sval);
  const widening_svalue *w_zero_then_one
    = w_zero_then_one_sval->dyn_cast_widening_svalue ();
  ASSERT_EQ (w_zero_then_one->get_direction (),
	     widening_svalue::DIR_ASCENDING);

  /* Comparisons that contradict the lower bound are known-false;
     those involving the unbounded upper side are unknown.  */
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_0),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (LE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GT_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_0),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (GE_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_m1),
	     tristate::TS_FALSE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (EQ_EXPR, int_256),
	     tristate::TS_UNKNOWN);

  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_m1),
	     tristate::TS_TRUE);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_0),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_1),
	     tristate::TS_UNKNOWN);
  ASSERT_EQ (w_zero_then_one->eval_condition_without_cm (NE_EXPR, int_256),
	     tristate::TS_UNKNOWN);
}
4219
/* Verify merging constraints for states simulating successive iterations
   of a loop.
   Simulate:
     for (i = 0; i < 256; i++)
       [...body...]
   i.e. this gimple:
     i_15 = 0;
     goto <bb 4>;

    <bb 4> :
    i_11 = PHI <i_15(2), i_23(3)>
    if (i_11 <= 255)
      goto <bb 3>;
    else
      goto [AFTER LOOP]

    <bb 3> :
    [LOOP BODY]
    i_23 = i_11 + 1;

   and thus these ops (and resultant states):
     i_11 = PHI()
       {i_11: 0}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: 0}  [constraint was a no-op]
     i_23 = i_11 + 1;
       {i_23: 1}
     i_11 = PHI()
       {i_11: WIDENED (at phi, 0, 1)}
     add_constraint (i_11 <= 255) [for the true edge]
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}
     i_23 = i_11 + 1;
       {i_23: (WIDENED (at phi, 0, 1) + 1); WIDENED <= 255}
     i_11 = PHI(); merge with state at phi above
       {i_11: WIDENED (at phi, 0, 1); WIDENED <= 256}
         [changing meaning of "WIDENED" here]
     if (i_11 <= 255)
        T: {i_11: WIDENED (at phi, 0, 1); WIDENED <= 255}; cache hit
        F: {i_11: 256}
 */

static void
test_iteration_1 ()
{
  program_point point (program_point::origin ());

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);
  tree int_256 = build_int_cst (integer_type_node, 256);
  tree int_257 = build_int_cst (integer_type_node, 257);
  tree i = build_global_decl ("i", integer_type_node);

  region_model_manager mgr;
  test_region_model_context ctxt;

  /* model0: i: 0.  */
  region_model model0 (&mgr);
  model0.set_value (i, int_0, &ctxt);

  /* model1: i: 1.  */
  region_model model1 (&mgr);
  model1.set_value (i, int_1, &ctxt);

  /* Should merge "i" to a widened value.  */
  region_model model2 (&mgr);
  ASSERT_TRUE (model1.can_merge_with_p (model0, point, &model2));
  const svalue *merged_i = model2.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_i->get_kind (), SK_WIDENING);
  const widening_svalue *w = merged_i->dyn_cast_widening_svalue ();
  ASSERT_EQ (w->get_direction (), widening_svalue::DIR_ASCENDING);

  /* Add constraint: i < 256  */
  model2.add_constraint (i, LT_EXPR, int_256, &ctxt);
  ASSERT_EQ (model2.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  ASSERT_EQ (model2.eval_condition (i, GE_EXPR, int_0, &ctxt),
	     tristate (tristate::TS_TRUE));

  /* Try merging with the initial state.  */
  region_model model3 (&mgr);
  ASSERT_TRUE (model2.can_merge_with_p (model0, point, &model3));
  /* Merging the merged value with the initial value should be idempotent,
     so that the analysis converges.  */
  ASSERT_EQ (model3.get_rvalue (i, &ctxt), merged_i);
  /* Merger of 0 and a widening value with constraint < CST
     should retain the constraint, even though it was implicit
     for the 0 case.  */
  ASSERT_EQ (model3.eval_condition (i, LT_EXPR, int_256, &ctxt),
	     tristate (tristate::TS_TRUE));
  /* ...and we should have equality: the analysis should have converged.  */
  ASSERT_EQ (model3, model2);

  /* "i_23 = i_11 + 1;"  */
  region_model model4 (model3);
  ASSERT_EQ (model4, model2);
  model4.set_value (i, build2 (PLUS_EXPR, integer_type_node, i, int_1), &ctxt);
  const svalue *plus_one = model4.get_rvalue (i, &ctxt);
  ASSERT_EQ (plus_one->get_kind (), SK_BINOP);

  /* Try merging with the "i: 1" state.  */
  region_model model5 (&mgr);
  ASSERT_TRUE (model4.can_merge_with_p (model1, point, &model5));
  ASSERT_EQ (model5.get_rvalue (i, &ctxt), plus_one);
  ASSERT_EQ (model5, model4);

  /* "i_11 = PHI();" merge with state at phi above.
     For i, we should have a merger of WIDENING with WIDENING + 1,
     and this should be WIDENING again.  */
  region_model model6 (&mgr);
  ASSERT_TRUE (model5.can_merge_with_p (model2, point, &model6));
  const svalue *merged_widening = model6.get_rvalue (i, &ctxt);
  ASSERT_EQ (merged_widening->get_kind (), SK_WIDENING);

  ASSERT_CONDITION_TRUE (model6, i, LT_EXPR, int_257);
}
4335
6969ac30
DM
/* Verify that if we mark a pointer to a malloc-ed region as non-NULL,
   all cast pointers to that region are also known to be non-NULL.  */

static void
test_malloc_constraints ()
{
  region_model_manager mgr;
  region_model model (&mgr);
  tree p = build_global_decl ("p", ptr_type_node);
  tree char_star = build_pointer_type (char_type_node);
  tree q = build_global_decl ("q", char_star);
  tree null_ptr = build_int_cst (ptr_type_node, 0);

  /* "p = malloc (<unknown size>);".  */
  const svalue *size_in_bytes
    = mgr.get_or_create_unknown_svalue (integer_type_node);
  const region *reg = model.create_region_for_heap_alloc (size_in_bytes);
  const svalue *sval = mgr.get_ptr_svalue (ptr_type_node, reg);
  model.set_value (model.get_lvalue (p, NULL), sval, NULL);
  /* "q = p;" - "q" (a char *) now holds the same heap pointer,
     implicitly cast.  */
  model.set_value (q, p, NULL);

  /* Before any constraint is added, nullness of both "p" and "q"
     is unknown.  */
  ASSERT_CONDITION_UNKNOWN (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_UNKNOWN (model, q, EQ_EXPR, null_ptr);

  model.add_constraint (p, NE_EXPR, null_ptr, NULL);

  /* Constraining "p" to be non-NULL should make "q" known
     non-NULL too.  */
  ASSERT_CONDITION_TRUE (model, p, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, p, EQ_EXPR, null_ptr);
  ASSERT_CONDITION_TRUE (model, q, NE_EXPR, null_ptr);
  ASSERT_CONDITION_FALSE (model, q, EQ_EXPR, null_ptr);
}
4368
808f4dfe
DM
4369/* Smoketest of getting and setting the value of a variable. */
4370
4371static void
4372test_var ()
4373{
4374 /* "int i;" */
4375 tree i = build_global_decl ("i", integer_type_node);
4376
4377 tree int_17 = build_int_cst (integer_type_node, 17);
4378 tree int_m3 = build_int_cst (integer_type_node, -3);
4379
4380 region_model_manager mgr;
4381 region_model model (&mgr);
4382
4383 const region *i_reg = model.get_lvalue (i, NULL);
4384 ASSERT_EQ (i_reg->get_kind (), RK_DECL);
4385
4386 /* Reading "i" should give a symbolic "initial value". */
4387 const svalue *sval_init = model.get_rvalue (i, NULL);
4388 ASSERT_EQ (sval_init->get_kind (), SK_INITIAL);
4389 ASSERT_EQ (sval_init->dyn_cast_initial_svalue ()->get_region (), i_reg);
4390 /* ..and doing it again should give the same "initial value". */
4391 ASSERT_EQ (model.get_rvalue (i, NULL), sval_init);
4392
4393 /* "i = 17;". */
4394 model.set_value (i, int_17, NULL);
4395 ASSERT_EQ (model.get_rvalue (i, NULL),
4396 model.get_rvalue (int_17, NULL));
4397
4398 /* "i = -3;". */
4399 model.set_value (i, int_m3, NULL);
4400 ASSERT_EQ (model.get_rvalue (i, NULL),
4401 model.get_rvalue (int_m3, NULL));
4402
4403 /* Verify get_offset for "i". */
4404 {
4405 region_offset offset = i_reg->get_offset ();
4406 ASSERT_EQ (offset.get_base_region (), i_reg);
4407 ASSERT_EQ (offset.get_bit_offset (), 0);
4408 }
4409}
4410
/* Smoketest of reads and writes of array elements via ARRAY_REF,
   with both constant and symbolic indices.  */

static void
test_array_2 ()
{
  /* "int arr[10];" */
  tree tlen = size_int (10);
  tree arr_type
    = build_array_type (integer_type_node, build_index_type (tlen));
  tree arr = build_global_decl ("arr", arr_type);

  /* "int i;" */
  tree i = build_global_decl ("i", integer_type_node);

  tree int_0 = build_int_cst (integer_type_node, 0);
  tree int_1 = build_int_cst (integer_type_node, 1);

  tree arr_0 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_0, NULL_TREE, NULL_TREE);
  tree arr_1 = build4 (ARRAY_REF, integer_type_node,
		       arr, int_1, NULL_TREE, NULL_TREE);
  tree arr_i = build4 (ARRAY_REF, integer_type_node,
		       arr, i, NULL_TREE, NULL_TREE);

  tree int_17 = build_int_cst (integer_type_node, 17);
  tree int_42 = build_int_cst (integer_type_node, 42);
  tree int_m3 = build_int_cst (integer_type_node, -3);

  region_model_manager mgr;
  region_model model (&mgr);
  /* "arr[0] = 17;".  */
  model.set_value (arr_0, int_17, NULL);
  /* "arr[1] = -3;".  */
  model.set_value (arr_1, int_m3, NULL);

  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_m3, NULL));

  /* Overwrite a pre-existing binding: "arr[1] = 42;".  */
  model.set_value (arr_1, int_42, NULL);
  ASSERT_EQ (model.get_rvalue (arr_1, NULL), model.get_rvalue (int_42, NULL));

  /* Verify get_offset for "arr[0]": bit offset 0 within "arr".  */
  {
    const region *arr_0_reg = model.get_lvalue (arr_0, NULL);
    region_offset offset = arr_0_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), 0);
  }

  /* Verify get_offset for "arr[1]": one "int" further into "arr".  */
  {
    const region *arr_1_reg = model.get_lvalue (arr_1, NULL);
    region_offset offset = arr_1_reg->get_offset ();
    ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
    ASSERT_EQ (offset.get_bit_offset (), INT_TYPE_SIZE);
  }

  /* "arr[i] = i;" - this should remove the earlier bindings, since the
     symbolic index could alias any element.  */
  model.set_value (arr_i, i, NULL);
  ASSERT_EQ (model.get_rvalue (arr_i, NULL), model.get_rvalue (i, NULL));
  ASSERT_EQ (model.get_rvalue (arr_0, NULL)->get_kind (), SK_UNKNOWN);

  /* "arr[0] = 17;" - this should remove the arr[i] binding.  */
  model.set_value (arr_0, int_17, NULL);
  ASSERT_EQ (model.get_rvalue (arr_0, NULL), model.get_rvalue (int_17, NULL));
  ASSERT_EQ (model.get_rvalue (arr_i, NULL)->get_kind (), SK_UNKNOWN);
}
4477
4478/* Smoketest of dereferencing a pointer via MEM_REF. */
4479
4480static void
4481test_mem_ref ()
4482{
4483 /*
4484 x = 17;
4485 p = &x;
4486 *p;
4487 */
4488 tree x = build_global_decl ("x", integer_type_node);
4489 tree int_star = build_pointer_type (integer_type_node);
4490 tree p = build_global_decl ("p", int_star);
4491
4492 tree int_17 = build_int_cst (integer_type_node, 17);
4493 tree addr_of_x = build1 (ADDR_EXPR, int_star, x);
4494 tree offset_0 = build_int_cst (integer_type_node, 0);
4495 tree star_p = build2 (MEM_REF, integer_type_node, p, offset_0);
4496
4497 region_model_manager mgr;
4498 region_model model (&mgr);
4499
4500 /* "x = 17;". */
4501 model.set_value (x, int_17, NULL);
4502
4503 /* "p = &x;". */
4504 model.set_value (p, addr_of_x, NULL);
4505
4506 const svalue *sval = model.get_rvalue (star_p, NULL);
4507 ASSERT_EQ (sval->maybe_get_constant (), int_17);
4508}
4509
4510/* Test for a POINTER_PLUS_EXPR followed by a MEM_REF.
4511 Analogous to this code:
4512 void test_6 (int a[10])
4513 {
4514 __analyzer_eval (a[3] == 42); [should be UNKNOWN]
4515 a[3] = 42;
4516 __analyzer_eval (a[3] == 42); [should be TRUE]
4517 }
4518 from data-model-1.c, which looks like this at the gimple level:
4519 # __analyzer_eval (a[3] == 42); [should be UNKNOWN]
4520 int *_1 = a_10(D) + 12; # POINTER_PLUS_EXPR
4521 int _2 = *_1; # MEM_REF
4522 _Bool _3 = _2 == 42;
4523 int _4 = (int) _3;
4524 __analyzer_eval (_4);
4525
4526 # a[3] = 42;
4527 int *_5 = a_10(D) + 12; # POINTER_PLUS_EXPR
4528 *_5 = 42; # MEM_REF
4529
4530 # __analyzer_eval (a[3] == 42); [should be TRUE]
4531 int *_6 = a_10(D) + 12; # POINTER_PLUS_EXPR
4532 int _7 = *_6; # MEM_REF
4533 _Bool _8 = _7 == 42;
4534 int _9 = (int) _8;
4535 __analyzer_eval (_9); */
4536
4537static void
4538test_POINTER_PLUS_EXPR_then_MEM_REF ()
4539{
4540 tree int_star = build_pointer_type (integer_type_node);
4541 tree a = build_global_decl ("a", int_star);
4542 tree offset_12 = build_int_cst (size_type_node, 12);
4543 tree pointer_plus_expr = build2 (POINTER_PLUS_EXPR, int_star, a, offset_12);
4544 tree offset_0 = build_int_cst (integer_type_node, 0);
4545 tree mem_ref = build2 (MEM_REF, integer_type_node,
4546 pointer_plus_expr, offset_0);
4547 region_model_manager mgr;
4548 region_model m (&mgr);
4549
4550 tree int_42 = build_int_cst (integer_type_node, 42);
4551 m.set_value (mem_ref, int_42, NULL);
4552 ASSERT_EQ (m.get_rvalue (mem_ref, NULL)->maybe_get_constant (), int_42);
4553}
4554
4555/* Verify that malloc works. */
4556
4557static void
4558test_malloc ()
4559{
4560 tree int_star = build_pointer_type (integer_type_node);
4561 tree p = build_global_decl ("p", int_star);
4562 tree n = build_global_decl ("n", integer_type_node);
4563 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
4564 n, build_int_cst (size_type_node, 4));
4565
4566 region_model_manager mgr;
4567 test_region_model_context ctxt;
4568 region_model model (&mgr);
4569
4570 /* "p = malloc (n * 4);". */
4571 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
4572 const region *reg = model.create_region_for_heap_alloc (size_sval);
4573 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
4574 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
4575 // TODO: verify dynamic extents
4576}
4577
4578/* Verify that alloca works. */
4579
4580static void
4581test_alloca ()
4582{
4583 auto_vec <tree> param_types;
4584 tree fndecl = make_fndecl (integer_type_node,
4585 "test_fn",
4586 param_types);
4587 allocate_struct_function (fndecl, true);
4588
4589
4590 tree int_star = build_pointer_type (integer_type_node);
4591 tree p = build_global_decl ("p", int_star);
4592 tree n = build_global_decl ("n", integer_type_node);
4593 tree n_times_4 = build2 (MULT_EXPR, size_type_node,
4594 n, build_int_cst (size_type_node, 4));
4595
4596 region_model_manager mgr;
4597 test_region_model_context ctxt;
4598 region_model model (&mgr);
4599
4600 /* Push stack frame. */
4601 const region *frame_reg
4602 = model.push_frame (DECL_STRUCT_FUNCTION (fndecl),
4603 NULL, &ctxt);
4604 /* "p = alloca (n * 4);". */
4605 const svalue *size_sval = model.get_rvalue (n_times_4, &ctxt);
4606 const region *reg = model.create_region_for_alloca (size_sval);
4607 ASSERT_EQ (reg->get_parent_region (), frame_reg);
4608 const svalue *ptr = mgr.get_ptr_svalue (int_star, reg);
4609 model.set_value (model.get_lvalue (p, &ctxt), ptr, &ctxt);
4610 // TODO: verify dynamic extents
4611
4612 /* Verify that the pointers to the alloca region are replaced by
4613 poisoned values when the frame is popped. */
4614 model.pop_frame (NULL, NULL, &ctxt);
4615 ASSERT_EQ (model.get_rvalue (p, &ctxt)->get_kind (), SK_POISONED);
4616}
4617
757bf1df
DM
4618/* Run all of the selftests within this file. */
4619
4620void
4621analyzer_region_model_cc_tests ()
4622{
8c08c983 4623 test_tree_cmp_on_constants ();
757bf1df 4624 test_dump ();
808f4dfe
DM
4625 test_struct ();
4626 test_array_1 ();
90f7c300 4627 test_get_representative_tree ();
757bf1df 4628 test_unique_constants ();
808f4dfe
DM
4629 test_unique_unknowns ();
4630 test_initial_svalue_folding ();
4631 test_unaryop_svalue_folding ();
4632 test_binop_svalue_folding ();
4633 test_sub_svalue_folding ();
4634 test_descendent_of_p ();
757bf1df 4635 test_assignment ();
a96f1c38 4636 test_compound_assignment ();
757bf1df
DM
4637 test_stack_frames ();
4638 test_get_representative_path_var ();
808f4dfe 4639 test_equality_1 ();
757bf1df
DM
4640 test_canonicalization_2 ();
4641 test_canonicalization_3 ();
8c08c983 4642 test_canonicalization_4 ();
757bf1df
DM
4643 test_state_merging ();
4644 test_constraint_merging ();
808f4dfe
DM
4645 test_widening_constraints ();
4646 test_iteration_1 ();
6969ac30 4647 test_malloc_constraints ();
808f4dfe
DM
4648 test_var ();
4649 test_array_2 ();
4650 test_mem_ref ();
4651 test_POINTER_PLUS_EXPR_then_MEM_REF ();
4652 test_malloc ();
4653 test_alloca ();
757bf1df
DM
4654}
4655
} // namespace selftest

#endif /* CHECKING_P */

} // namespace ana

#endif /* #if ENABLE_ANALYZER */